diff --git a/ios-broadcast-upload-extension/Classes/BBBSampleHandler.swift b/ios-broadcast-upload-extension/Classes/BBBSampleHandler.swift
index 09c0155..a3f8bf6 100644
--- a/ios-broadcast-upload-extension/Classes/BBBSampleHandler.swift
+++ b/ios-broadcast-upload-extension/Classes/BBBSampleHandler.swift
@@ -131,6 +131,7 @@ open class BBBSampleHandler : RPBroadcastSampleHandler {
         switch sampleBufferType {
         case RPSampleBufferType.video:
             logger.trace("ReplayKit2 event - processSampleBuffer(video)")
+            self.screenBroadcaster?.pushVideoFrame(sampleBuffer: sampleBuffer)
             break
         case RPSampleBufferType.audioApp:
             logger.trace("ReplayKit2 event - processSampleBuffer(audioApp)")
diff --git a/ios-broadcast-upload-extension/Classes/ScreenBroadcaster.swift b/ios-broadcast-upload-extension/Classes/ScreenBroadcaster.swift
index d1f2555..b08d332 100644
--- a/ios-broadcast-upload-extension/Classes/ScreenBroadcaster.swift
+++ b/ios-broadcast-upload-extension/Classes/ScreenBroadcaster.swift
@@ -13,6 +13,7 @@ open class ScreenBroadcaster {
     private var webRTCClient:WebRTCClient
     private var appGroupName:String
    private let encoder = JSONEncoder()
+    public private(set) var isConnected: Bool = false
 
     init(appGroupName: String) {
         self.appGroupName = appGroupName
@@ -55,6 +56,22 @@ open class ScreenBroadcaster {
         }
     }
 
+    /// Forwards a ReplayKit video sample to WebRTC once the peer connection is up.
+    /// Frames that arrive before the connection is established are dropped.
+    public func pushVideoFrame(sampleBuffer: CMSampleBuffer) {
+        guard isConnected else {
+            self.logger.trace("Ignoring pushVideoFrame - not connected")
+            return
+        }
+        // CMSampleBufferGetImageBuffer returns an optional and can be nil for
+        // buffers without pixel data; drop the frame instead of crashing the extension.
+        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+        let timeStampNs = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
+        let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: imageBuffer)
+        let rtcVideoFrame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: ._0, timeStampNs: timeStampNs)
+        self.webRTCClient.push(videoFrame: rtcVideoFrame)
+    }
+
 }
 
 extension ScreenBroadcaster: WebRTCClientDelegate {
@@ -133,6 +150,10 @@ extension ScreenBroadcaster: WebRTCClientDelegate {
             self.logger.error("peerConnection new signaling state -> UNKNOWN")
         }
 
+        // NOTE(review): this marks the broadcaster connected on ANY signaling-state
+        // change, including failed/closed states — confirm this is intentional.
+        self.isConnected = true
+
         BBBSharedData
             .getUserDefaults(appGroupName: self.appGroupName)
             .set(BBBSharedData.generatePayload(properties: [
diff --git a/ios-common/Classes/WebRTCClient.swift b/ios-common/Classes/WebRTCClient.swift
index 96a12aa..da4cf0e 100644
--- a/ios-common/Classes/WebRTCClient.swift
+++ b/ios-common/Classes/WebRTCClient.swift
@@ -96,11 +96,10 @@ open class WebRTCClient: NSObject {
 
     // MARK: Media
 
-    /*func push(videoFrame: RTCVideoFrame) {
+    public func push(videoFrame: RTCVideoFrame) {
         guard videoCapturer != nil, videoSource != nil else { return }
         videoSource!.capturer(videoCapturer!, didCapture: videoFrame)
-        print("RTCVideoFrame pushed to server.")
-    }*/
+    }
 
     /*private func configureAudioSession() {
         self.rtcAudioSession.lockForConfiguration()
@@ -134,9 +133,16 @@ open class WebRTCClient: NSObject {
     }*/
 
     private func createVideoTrack() -> RTCVideoTrack {
+        // Scale the captured screen to a fixed width, preserving the device's
+        // portrait aspect ratio. Compute in CGFloat first: converting the ratio
+        // to Int32 before multiplying would truncate it and distort the height.
+        let targetWidth: Int32 = 600
+        let screenBounds = UIScreen.main.fixedCoordinateSpace.bounds
+        let targetHeight = Int32(CGFloat(targetWidth) * screenBounds.height / screenBounds.width)
+
         videoSource = WebRTCClient.factory.videoSource(forScreenCast: true)
         videoCapturer = RTCVideoCapturer(delegate: videoSource!)
-        videoSource!.adaptOutputFormat(toWidth: 600, height: 800, fps: 15)
+        videoSource!.adaptOutputFormat(toWidth: targetWidth, height: targetHeight, fps: 15)
         let videoTrack = WebRTCClient.factory.videoTrack(with: videoSource!, trackId: "video0")
         videoTrack.isEnabled = true
         return videoTrack