I'm trying to live stream the contents of an iPhone's screen over Wi-Fi to a local server using a ReplayKit broadcast extension. How can I send the CMSampleBuffers that I encode with VideoToolbox to a server on the LAN, and how can I play the stream live on that server? This is my broadcast extension's sample handler so far:
import ReplayKit
import VideoToolbox
import Network
class SampleHandler: RPBroadcastSampleHandler {

    let ip = "192.168.0.247"
    let port: NWEndpoint.Port = 9999

    var compressionSession: VTCompressionSession?
    var connection: NWConnection?

    override init() {
        super.init()
        // UDP connection to the machine on the LAN that should receive the stream.
        connection = NWConnection(host: NWEndpoint.Host(ip), port: port, using: .udp)
        connection?.start(queue: .main)
    }
    override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
        // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
        // Create the HEVC compression session. No output callback is set here because
        // frames are encoded with a per-frame output handler below.
        VTCompressionSessionCreate(allocator: nil,
                                   width: 1000,
                                   height: 2000,
                                   codecType: kCMVideoCodecType_HEVC,
                                   encoderSpecification: nil,
                                   imageBufferAttributes: nil,
                                   compressedDataAllocator: nil,
                                   outputCallback: nil,
                                   refcon: nil,
                                   compressionSessionOut: &compressionSession)
    }
    override func broadcastPaused() {
        // User has requested to pause the broadcast. Samples will stop being delivered.
    }

    override func broadcastResumed() {
        // User has requested to resume the broadcast. Samples delivery will resume.
    }

    override func broadcastFinished() {
        // User has requested to finish the broadcast.
    }
    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        switch sampleBufferType {
        case .video:
            guard let session = compressionSession,
                  let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { break }
            let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            VTCompressionSessionEncodeFrame(session,
                                            imageBuffer: imageBuffer,
                                            presentationTimeStamp: pts,
                                            duration: CMTime.invalid,
                                            frameProperties: nil,
                                            infoFlagsOut: nil) { (status: OSStatus, flags: VTEncodeInfoFlags, encodedBuffer: CMSampleBuffer?) in
                // What can I use to send the encoded data here? (My rough idea is sketched below the class.)
            }
        case .audioApp:
            // Handle audio sample buffer for app audio
            break
        case .audioMic:
            // Handle audio sample buffer for mic audio
            break
        @unknown default:
            // Handle other sample buffer types
            fatalError("Unknown type of sample buffer")
        }
    }
}
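For the sending part, the rough idea I've come up with (untested; sendEncodedFrame is just a name I made up, not an API) is to add something like the method below to SampleHandler and call it from the encoder's output handler: pull the VPS/SPS/PPS parameter sets and the length-prefixed NAL units out of the encoded CMSampleBuffer, rewrite them as an Annex B stream, and push that over the NWConnection. Is this a sensible approach?

    // Rough sketch of what I'm considering inside SampleHandler (untested).
    // Assumptions: the encoded buffer contains 4-byte length-prefixed HEVC NAL
    // units, and the receiver wants an Annex B stream with start codes.
    func sendEncodedFrame(_ encodedBuffer: CMSampleBuffer) {
        let startCode = Data([0x00, 0x00, 0x00, 0x01])
        var stream = Data()

        // Prepend the VPS/SPS/PPS parameter sets from the format description.
        if let format = CMSampleBufferGetFormatDescription(encodedBuffer) {
            var parameterSetCount = 0
            CMVideoFormatDescriptionGetHEVCParameterSetAtIndex(format,
                                                               parameterSetIndex: 0,
                                                               parameterSetPointerOut: nil,
                                                               parameterSetSizeOut: nil,
                                                               parameterSetCountOut: &parameterSetCount,
                                                               nalUnitHeaderLengthOut: nil)
            for index in 0..<parameterSetCount {
                var pointer: UnsafePointer<UInt8>?
                var size = 0
                CMVideoFormatDescriptionGetHEVCParameterSetAtIndex(format,
                                                                   parameterSetIndex: index,
                                                                   parameterSetPointerOut: &pointer,
                                                                   parameterSetSizeOut: &size,
                                                                   parameterSetCountOut: nil,
                                                                   nalUnitHeaderLengthOut: nil)
                if let pointer = pointer {
                    stream.append(startCode)
                    stream.append(pointer, count: size)
                }
            }
        }

        // Replace each 4-byte big-endian NAL length with a start code.
        guard let blockBuffer = CMSampleBufferGetDataBuffer(encodedBuffer) else { return }
        var totalLength = 0
        var dataPointer: UnsafeMutablePointer<CChar>?
        CMBlockBufferGetDataPointer(blockBuffer,
                                    atOffset: 0,
                                    lengthAtOffsetOut: nil,
                                    totalLengthOut: &totalLength,
                                    dataPointerOut: &dataPointer)
        guard let base = dataPointer else { return }
        var offset = 0
        while offset + 4 <= totalLength {
            var nalLength: UInt32 = 0
            memcpy(&nalLength, base + offset, 4)
            let length = Int(UInt32(bigEndian: nalLength))
            stream.append(startCode)
            stream.append(Data(bytes: base + offset + 4, count: length))
            offset += 4 + length
        }

        // A single frame can easily exceed the ~64 KB UDP datagram limit, so this
        // probably needs chunking (or TCP instead); that is part of my question.
        connection?.send(content: stream, completion: .contentProcessed({ _ in }))
    }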
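On the receiving side (assuming the "server" can simply be a Mac on the same network running a small Swift script), the best I can picture so far is an NWListener that appends every incoming datagram to a raw HEVC file, roughly as below. The file name and everything else here is just my guess, and it only writes a file rather than playing the stream live, which is exactly the part I'm missing.

    import Foundation
    import Network

    // Very rough receiver sketch for a Mac on the same LAN: append every UDP
    // datagram arriving on port 9999 to a raw HEVC file. Live playback is the
    // part I still don't know how to do.
    let listener = try NWListener(using: .udp, on: 9999)
    _ = FileManager.default.createFile(atPath: "stream.h265", contents: nil)
    let output = FileHandle(forWritingAtPath: "stream.h265")

    listener.newConnectionHandler = { connection in
        connection.start(queue: .main)
        func receiveNext() {
            connection.receiveMessage { data, _, _, error in
                if let data = data {
                    output?.write(data)
                }
                if error == nil {
                    receiveNext()
                }
            }
        }
        receiveNext()
    }

    listener.start(queue: .main)
    RunLoop.main.run()

Is there a standard way to turn whatever arrives here into live playback on the server, or is this whole approach wrong?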