This article looks at iOS screen sharing with WebRTC (ReplayKit) in Swift; the question and recommended answer below may be a useful reference if you are solving the same problem.

Problem Description

I implemented a WebRTC SDK for video calling and it works fine. During a video call, the user can share their screen with the other user. I am using ReplayKit for the screen sharing.

Here is my code:

import ReplayKit
import WebRTC

class SampleHandler: RPBroadcastSampleHandler {

    var peerConnectionFactory: RTCPeerConnectionFactory?
    var localVideoSource: RTCVideoSource?
    var videoCapturer: RTCVideoCapturer?
    var peerConnection: RTCPeerConnection?
    var localVideoTrack: RTCVideoTrack?

    var disconnectSemaphore: DispatchSemaphore?
    var videodelegate: VideoViewExtensionDelegate?
    var signalClient: SignalingClient? = nil
    let config = Config.default


    let peerConnectionfactory: RTCPeerConnectionFactory = {
        RTCInitializeSSL()
        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
        let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
        return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
    }()

    private let mediaConstrains = [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueFalse,
                                   kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue]


    static let kAudioSampleType = RPSampleBufferType.audioMic


    override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {

        self.SetupVideo()
    }
    override func broadcastPaused() {
        // User has requested to pause the broadcast. Samples will stop being delivered.
//        self.audioTrack?.isEnabled = false
//        self.screenTrack?.isEnabled = false
    }

    override func broadcastResumed() {
        // User has requested to resume the broadcast. Samples delivery will resume.
//        self.audioTrack?.isEnabled = true
//        self.screenTrack?.isEnabled = true
    }

    override func broadcastFinished() {
        // User has requested to finish the broadcast.
    }

    func SetupVideo() {

        if #available(iOS 13.0, *) {
            let webSocketProvider: WebSocketProvider = NativeWebSocket(url: self.config.signalingServerUrl)
            self.signalClient = SignalingClient(webSocket: webSocketProvider)

            let config = RTCConfiguration()
            // config.iceServers = [RTCIceServer(urlStrings: iceServers)]
            config.iceServers = [RTCIceServer(urlStrings: ["// TURN & STUN server URL"],
                                              username: "// username",
                                              credential: "// password")]

            // Unified Plan is preferred over Plan B.
            // config.sdpSemantics = .unifiedPlan

            // gatherContinually lets WebRTC listen for network changes and send any new candidates to the other client.
            config.continualGatheringPolicy = .gatherContinually

            let screenSharefactory = self.peerConnectionfactory

            let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                                  optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])
            self.peerConnection = screenSharefactory.peerConnection(with: config, constraints: constraints, delegate: nil)
            self.peerConnection?.delegate = self

            self.localVideoSource = screenSharefactory.videoSource()
            self.videoCapturer = RTCVideoCapturer(delegate: self.localVideoSource!)
            self.localVideoTrack = screenSharefactory.videoTrack(with: self.localVideoSource!, trackId: "video0")

            // let videoSender = newpeerConnection.sender(withKind: kRTCMediaStreamTrackKindVideo, streamId: "stream")
            // videoSender.track = videoTrack

            let mediaStream: RTCMediaStream = screenSharefactory.mediaStream(withStreamId: "1")
            mediaStream.addVideoTrack(self.localVideoTrack!)
            self.peerConnection?.add(mediaStream)

            self.offer(peerconnection: self.peerConnection!) { (sdp) in
                self.signalClient?.send(sdp: sdp)
            }
        }
    }


    func offer(peerconnection: RTCPeerConnection, completion: @escaping (_ sdp: RTCSessionDescription) -> Void) {

        let constrains = RTCMediaConstraints(mandatoryConstraints: self.mediaConstrains,
                                             optionalConstraints: nil)
        peerconnection.offer(for: constrains) { (sdp, error) in
            guard let sdp = sdp else {
                return
            }

            peerconnection.setLocalDescription(sdp, completionHandler: { (error) in
                completion(sdp)
            })
        }
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {

        switch sampleBufferType {
        case RPSampleBufferType.video:
            guard let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                break
            }
            let rtcpixelBuffer = RTCCVPixelBuffer(pixelBuffer: imageBuffer)
            let timeStampNs: Int64 = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1000000000)
            let videoFrame = RTCVideoFrame(buffer: rtcpixelBuffer, rotation: RTCVideoRotation._0, timeStampNs: timeStampNs)
            print(videoFrame)
            self.localVideoSource?.capturer(self.videoCapturer!, didCapture: videoFrame)
            break

        case RPSampleBufferType.audioApp:
            if (SampleHandler.kAudioSampleType == RPSampleBufferType.audioApp) {
            //    ExampleCoreAudioDeviceCapturerCallback(audioDevice, sampleBuffer)
            }
            break

        case RPSampleBufferType.audioMic:
            if (SampleHandler.kAudioSampleType == RPSampleBufferType.audioMic) {

            }
            break
        @unknown default:
            return
        }
    }
}

extension SampleHandler: RTCPeerConnectionDelegate {

    func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
        debugPrint("peerConnection new signaling state: \(stateChanged)")
    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
        debugPrint("peerConnection did add stream")
    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
        debugPrint("peerConnection did remote stream")
    }

    func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {
        debugPrint("peerConnection should negotiate")
    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
        debugPrint("peerConnection new connection state: \(newState)")

    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
        debugPrint("peerConnection new gathering state: \(newState)")
    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
        debugPrint("peerConnection did generate candidate")
    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {
        debugPrint("peerConnection did remove candidate(s)")
    }

    func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
        debugPrint("peerConnection did open data channel")
       // self.remoteDataChannel = dataChannel
    }
}


extension SampleHandler: RTCDataChannelDelegate {
    func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) {
        debugPrint("dataChannel did change state: \(dataChannel.readyState)")
    }

    func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer) {

    }
}

I am using this WebRTC project: https://github.com/stasel/WebRTC-iOS. I am getting the CMSampleBuffer data and the RTCVideoFrame and passing them along correctly. Here is the CMSampleBuffer data for reference:

CMSampleBuffer 0x100918370 retainCount: 5 allocator: 0x1e32175e0
    invalid = NO
    dataReady = YES
    makeDataReadyCallback = 0x0
    makeDataReadyRefcon = 0x0
    formatDescription = <CMAudioFormatDescription 0x282bf0e60 [0x1e32175e0]> {
    mediaType:'soun'
    mediaSubType:'lpcm'
    mediaSpecific: {
        ASBD: {
            mSampleRate: 44100.000000
            mFormatID: 'lpcm'
            mFormatFlags: 0xe
            mBytesPerPacket: 4
            mFramesPerPacket: 1
            mBytesPerFrame: 4
            mChannelsPerFrame: 2
            mBitsPerChannel: 16     }
        cookie: {(null)}
        ACL: {(null)}
        FormatList Array: {
            Index: 0
            ChannelLayoutTag: 0x650002
            ASBD: {
            mSampleRate: 44100.000000
            mFormatID: 'lpcm'
            mFormatFlags: 0xe
            mBytesPerPacket: 4
            mFramesPerPacket: 1
            mBytesPerFrame: 4
            mChannelsPerFrame: 2
            mBitsPerChannel: 16     }}
    }
    extensions: {(null)}
}
    sbufToTrackReadiness = 0x0
    numSamples = 1024
    outputPTS = {190371138262458/1000000000 = 190371.138}(based on cachedOutputPresentationTimeStamp)
    sampleTimingArray[1] = {
        {PTS = {190371138262458/1000000000 = 190371.138}, DTS = {INVALID}, duration = {1/44100 = 0.000}},
    }
    dataBuffer = 0x2828f1050

I am stuck here and don't know what is wrong with my code. Any help is highly appreciated.

Recommended Answer

WebRTC is a peer-to-peer connection. If you want to share your screen with another user, you have to create a CVPixelBuffer from the screen (using a custom capture class, which the answer calls RTCCustomcaptureframe) and create a WebRTC client to connect to the other device. (To keep the WebRTC client setup simpler, just split it out.)
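For illustration, here is a minimal sketch of such a capturer, assuming the standard WebRTC iOS framework (RTCVideoCapturer, RTCCVPixelBuffer, RTCVideoFrame). The class name ReplayKitScreenCapturer and the method pushSampleBuffer(_:) are hypothetical names invented for this example; they are not part of the library or of the answer's code.

import CoreMedia
import ReplayKit
import WebRTC

// Sketch of a custom capturer that turns ReplayKit video sample buffers
// into RTCVideoFrames and forwards them to its delegate (an RTCVideoSource).
// Class and method names are placeholders for this example.
final class ReplayKitScreenCapturer: RTCVideoCapturer {

    func pushSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        guard CMSampleBufferIsValid(sampleBuffer),
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
        let timeStampNs = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
        let frame = RTCVideoFrame(buffer: rtcPixelBuffer,
                                  rotation: ._0,
                                  timeStampNs: timeStampNs)

        // The delegate is the RTCVideoSource that the local video track was created from.
        self.delegate?.capturer(self, didCapture: frame)
    }
}

In processSampleBuffer(_:with:), the video case would then create one such capturer (with the RTCVideoSource as its delegate) and hand each video sample buffer to pushSampleBuffer(_:), which is essentially what the question's inline code already does.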

You cannot connect three devices with a single peer connection.
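In practice that means the broadcast extension opens its own peer connection, with its own offer/answer exchange over the signaling channel, for the screen track, in addition to the connection the main app already uses for the call. Below is a minimal sketch of that split under the same WebRTC framework assumptions; the function name and the "screen0"/"screenStream" identifiers are placeholders invented for this example.

import WebRTC

// Sketch: a peer connection dedicated to the screen share, created alongside
// (not instead of) the call's existing connection in the main app.
// Function name, track ID and stream ID are placeholders.
func makeScreenSharePeerConnection(factory: RTCPeerConnectionFactory,
                                   config: RTCConfiguration) -> (connection: RTCPeerConnection?, source: RTCVideoSource) {
    let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                          optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])

    let connection: RTCPeerConnection? = factory.peerConnection(with: config,
                                                                constraints: constraints,
                                                                delegate: nil)

    let screenSource = factory.videoSource()
    let screenTrack = factory.videoTrack(with: screenSource, trackId: "screen0")

    let stream = factory.mediaStream(withStreamId: "screenStream")
    stream.addVideoTrack(screenTrack)
    connection?.add(stream)

    // The caller creates an offer on this connection and sends it through the
    // signaling channel as a second, independent negotiation.
    return (connection, screenSource)
}

The RTCVideoSource returned here is the one the custom capturer sketched above would use as its delegate, so that every ReplayKit frame ends up on the dedicated screen-share connection rather than on the call's connection.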

That concludes this article on iOS screen sharing with WebRTC (ReplayKit) in Swift. We hope the recommended answer is helpful.