iOS screen sharing (ReplayKit) using WebRTC in Swift

Question


I implemented the WebRTC SDK for video calling and it works fine. During a video call, a user can share their screen with the other user. I am using ReplayKit for screen sharing.
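For context, the broadcast upload extension that hosts the SampleHandler shown below is usually started from the main app with RPSystemBroadcastPickerView. A minimal sketch, where "com.example.app.ScreenShare" is a placeholder for the extension's bundle identifier:

import ReplayKit
import UIKit

// Adds the system broadcast picker button that launches the screen-share extension.
func addBroadcastPicker(to view: UIView) {
    let picker = RPSystemBroadcastPickerView(frame: CGRect(x: 0, y: 0, width: 60, height: 60))
    picker.preferredExtension = "com.example.app.ScreenShare" // placeholder bundle ID of the extension
    picker.showsMicrophoneButton = false
    view.addSubview(picker)
}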

Here is my code

import ReplayKit
import WebRTC

class SampleHandler: RPBroadcastSampleHandler {

    var peerConnectionFactory: RTCPeerConnectionFactory?
    var localVideoSource: RTCVideoSource?
    var videoCapturer: RTCVideoCapturer?
    var peerConnection: RTCPeerConnection?
    var localVideoTrack: RTCVideoTrack?

    var disconnectSemaphore: DispatchSemaphore?
    var videodelegate: VideoViewExtensionDelegate?
    var signalClient: SignalingClient? = nil
    let config = Config.default

    let peerConnectionfactory: RTCPeerConnectionFactory = {
        RTCInitializeSSL()
        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
        let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
        return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
    }()

    private let mediaConstrains = [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueFalse,
                                   kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue]

    static let kAudioSampleType = RPSampleBufferType.audioMic


    override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
        
        self.SetupVideo()
    }
    override func broadcastPaused() {
        // User has requested to pause the broadcast. Samples will stop being delivered.
//        self.audioTrack?.isEnabled = false
//        self.screenTrack?.isEnabled = false
    }

    override func broadcastResumed() {
        // User has requested to resume the broadcast. Samples delivery will resume.
//        self.audioTrack?.isEnabled = true
//        self.screenTrack?.isEnabled = true
    }

    override func broadcastFinished() {
        // User has requested to finish the broadcast.
    }
    
    func SetupVideo() {

        if #available(iOS 13.0, *) {
            let webSocketProvider: WebSocketProvider = NativeWebSocket(url: self.config.signalingServerUrl)
            self.signalClient = SignalingClient(webSocket: webSocketProvider)

            let config = RTCConfiguration()
            // config.iceServers = [RTCIceServer(urlStrings: iceServers)]
            config.iceServers = [RTCIceServer(urlStrings: ["// TURN & STUN server URL"],
                                              username: "// username",
                                              credential: "// password")]

            // Unified Plan is preferred over Plan B
            // config.sdpSemantics = .unifiedPlan

            // gatherContinually lets WebRTC listen for network changes and send new candidates to the other client
            config.continualGatheringPolicy = .gatherContinually

            let screenSharefactory = self.peerConnectionfactory

            let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                                  optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])
            self.peerConnection = screenSharefactory.peerConnection(with: config, constraints: constraints, delegate: nil)
            self.peerConnection?.delegate = self

            self.localVideoSource = screenSharefactory.videoSource()
            self.videoCapturer = RTCVideoCapturer(delegate: self.localVideoSource!)
            self.localVideoTrack = screenSharefactory.videoTrack(with: self.localVideoSource!, trackId: "video0")

            // let videoSender = newpeerConnection.sender(withKind: kRTCMediaStreamTrackKindVideo, streamId: "stream")
            // videoSender.track = videoTrack

            let mediaStream: RTCMediaStream = screenSharefactory.mediaStream(withStreamId: "1")
            mediaStream.addVideoTrack(self.localVideoTrack!)
            self.peerConnection?.add(mediaStream)

            self.offer(peerconnection: self.peerConnection!) { (sdp) in
                self.signalClient?.send(sdp: sdp)
            }
        }
    }
    
    
    func offer(peerconnection: RTCPeerConnection, completion: @escaping (_ sdp: RTCSessionDescription) -> Void) {

        let constrains = RTCMediaConstraints(mandatoryConstraints: self.mediaConstrains,
                                             optionalConstraints: nil)
        peerconnection.offer(for: constrains) { (sdp, error) in
            guard let sdp = sdp else {
                return
            }
            
            peerconnection.setLocalDescription(sdp, completionHandler: { (error) in
                completion(sdp)
            })
        }
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {

        switch sampleBufferType {
        case RPSampleBufferType.video:

            guard let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                break
            }
            let rtcpixelBuffer = RTCCVPixelBuffer(pixelBuffer: imageBuffer)
            let timeStampNs: Int64 = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
            let videoFrame = RTCVideoFrame(buffer: rtcpixelBuffer, rotation: RTCVideoRotation._0, timeStampNs: timeStampNs)
            print(videoFrame)
            self.localVideoSource?.capturer(self.videoCapturer!, didCapture: videoFrame)

        case RPSampleBufferType.audioApp:
            if (SampleHandler.kAudioSampleType == RPSampleBufferType.audioApp) {
            //    ExampleCoreAudioDeviceCapturerCallback(audioDevice, sampleBuffer)
            }
            break

        case RPSampleBufferType.audioMic:
            if (SampleHandler.kAudioSampleType == RPSampleBufferType.audioMic) {

            }
            break
        @unknown default:
            return
        }
    }
}

extension SampleHandler: RTCPeerConnectionDelegate {
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
        debugPrint("peerConnection new signaling state: \(stateChanged)")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
        debugPrint("peerConnection did add stream")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
        debugPrint("peerConnection did remote stream")
    }
    
    func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {
        debugPrint("peerConnection should negotiate")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
        debugPrint("peerConnection new connection state: \(newState)")
       
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
        debugPrint("peerConnection new gathering state: \(newState)")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
        debugPrint("peerConnection did Generate")

    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {
        debugPrint("peerConnection did remove candidate(s)")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
        debugPrint("peerConnection did open data channel")
       // self.remoteDataChannel = dataChannel
    }
}


extension SampleHandler: RTCDataChannelDelegate {
    func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) {
        debugPrint("dataChannel did change state: \(dataChannel.readyState)")
    }
    
    func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer) {
       
    }
}

I am using this WebRTC project: https://github.com/stasel/WebRTC-iOS. I am getting the CMSampleBuffer data, creating the RTCVideoFrame, and passing it on correctly. CMSampleBuffer data for reference:

CMSampleBuffer 0x100918370 retainCount: 5 allocator: 0x1e32175e0
    invalid = NO
    dataReady = YES
    makeDataReadyCallback = 0x0
    makeDataReadyRefcon = 0x0
    formatDescription = <CMAudioFormatDescription 0x282bf0e60 [0x1e32175e0]> {
    mediaType:'soun' 
    mediaSubType:'lpcm' 
    mediaSpecific: {
        ASBD: {
            mSampleRate: 44100.000000 
            mFormatID: 'lpcm' 
            mFormatFlags: 0xe 
            mBytesPerPacket: 4 
            mFramesPerPacket: 1 
            mBytesPerFrame: 4 
            mChannelsPerFrame: 2 
            mBitsPerChannel: 16     } 
        cookie: {(null)} 
        ACL: {(null)}
        FormatList Array: {
            Index: 0 
            ChannelLayoutTag: 0x650002 
            ASBD: {
            mSampleRate: 44100.000000 
            mFormatID: 'lpcm' 
            mFormatFlags: 0xe 
            mBytesPerPacket: 4 
            mFramesPerPacket: 1 
            mBytesPerFrame: 4 
            mChannelsPerFrame: 2 
            mBitsPerChannel: 16     }} 
    } 
    extensions: {(null)}
}
    sbufToTrackReadiness = 0x0
    numSamples = 1024
    outputPTS = {190371138262458/1000000000 = 190371.138}(based on cachedOutputPresentationTimeStamp)
    sampleTimingArray[1] = {
        {PTS = {190371138262458/1000000000 = 190371.138}, DTS = {INVALID}, duration = {1/44100 = 0.000}},
    }
    dataBuffer = 0x2828f1050

I am stuck here and don't know what is wrong with my code. Any help is highly appreciated.


Answer 1:


WebRTC is a peer-to-peer connection. If you want to share your screen with another user, you have to create a CVPixelBuffer from the screen (using a custom capture class, e.g. one called RTCCustomcaptureframe) and create a WebRTC client to connect to the other device. (For a simpler setup, split the WebRTC client into its own class.)
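To make that concrete, here is a minimal sketch of such a custom capture class in the style of the stasel/WebRTC-iOS wrapper used in the question; ScreenSampleCapturer and push(_:) are illustrative names, not library API:

import CoreMedia
import WebRTC

// Illustrative custom capturer: forwards ReplayKit video sample buffers to the
// RTCVideoSource that is set as its delegate.
final class ScreenSampleCapturer: RTCVideoCapturer {

    // Push one ReplayKit video sample buffer into WebRTC as an RTCVideoFrame.
    func push(_ sampleBuffer: CMSampleBuffer) {
        guard CMSampleBufferIsValid(sampleBuffer),
              CMSampleBufferDataIsReady(sampleBuffer),
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
        let timeStampNs = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
        let frame = RTCVideoFrame(buffer: rtcPixelBuffer,
                                  rotation: ._0,
                                  timeStampNs: timeStampNs)

        // The RTCVideoSource acting as delegate hands the frame to the local video track.
        delegate?.capturer(self, didCapture: frame)
    }
}

The broadcast extension would create it as ScreenSampleCapturer(delegate: videoSource) and call capturer.push(sampleBuffer) from processSampleBuffer for .video buffers; calling videoSource.adaptOutputFormat(toWidth:height:fps:) beforehand helps keep the extension within the broadcast extension's memory limit.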

You cannot connect three devices with a single peer connection.
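If you do want to reach several viewers, the usual pattern is one RTCPeerConnection per remote peer. A rough sketch, where peerId is an assumed identifier coming from your signaling server:

import WebRTC

// Rough sketch: one RTCPeerConnection per remote peer, keyed by a signaling-level peer ID.
final class PeerConnectionPool {

    private var connections: [String: RTCPeerConnection] = [:]
    private let factory: RTCPeerConnectionFactory
    private let rtcConfig: RTCConfiguration

    init(factory: RTCPeerConnectionFactory, rtcConfig: RTCConfiguration) {
        self.factory = factory
        self.rtcConfig = rtcConfig
    }

    // Returns the existing connection for this peer, or creates a new one.
    func connection(for peerId: String, delegate: RTCPeerConnectionDelegate) -> RTCPeerConnection? {
        if let existing = connections[peerId] {
            return existing
        }
        let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                              optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])
        let connection = factory.peerConnection(with: rtcConfig, constraints: constraints, delegate: delegate)
        connections[peerId] = connection
        return connection
    }

    // Tear down and forget the connection for a peer that left.
    func close(peerId: String) {
        connections[peerId]?.close()
        connections[peerId] = nil
    }
}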



Source: https://stackoverflow.com/questions/62775262/ios-screen-sharing-replaykit-using-webrtc-in-swift
