You can achieve what you want by recording 30-second chunks of video and enqueueing them onto an AVQueuePlayer for seamless playback. Recording the chunks would be very easy with AVCaptureFileOutput
on macOS, but sadly, on iOS you cannot start a new chunk without dropping frames, so you have to use the wordier, lower-level AVAssetWriter
API:
import UIKit
import AVFoundation
// TODO: delete old videos
// TODO: audio
/// Captures camera video, writes it to disk in 30-second chunks via
/// AVAssetWriter, and plays finished chunks back through an AVQueuePlayer
/// layered over the view.
class ViewController: UIViewController {

    // MARK: - Capture
    let captureSession = AVCaptureSession()
    /// Serial queue for sample-buffer delivery. Apple requires a *serial*
    /// dispatch queue here; delivering frames on the main queue (as before)
    /// competes with UI work and can drop frames.
    let sampleBufferQueue = DispatchQueue(label: "sample-buffer")

    // MARK: - Playback
    let player = AVQueuePlayer()
    var playerLayer: AVPlayerLayer! = nil

    // MARK: - Output. Sadly not AVCaptureMovieFileOutput.
    var assetWriter: AVAssetWriter! = nil
    var assetWriterInput: AVAssetWriterInput! = nil
    var chunkNumber = 0
    var chunkStartTime: CMTime! = nil
    var chunkOutputURL: URL! = nil

    override func viewDidLoad() {
        super.viewDidLoad()

        playerLayer = AVPlayerLayer(player: player)
        view.layer.addSublayer(playerLayer)

        // Inputs. defaultDevice(withMediaType:) returns nil on devices
        // without a camera (e.g. the simulator) — fail gracefully instead of
        // crashing on a force-try.
        guard let videoCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
            let videoInput = try? AVCaptureDeviceInput(device: videoCaptureDevice),
            captureSession.canAddInput(videoInput) else {
            print("Unable to configure the camera input")
            return
        }
        captureSession.addInput(videoInput)

        // Outputs.
        // iOS AVCaptureFileOutput/AVCaptureMovieFileOutput still don't support
        // dynamically switching files (?) so we re-implement chunking with
        // AVAssetWriter.
        let videoOutput = AVCaptureVideoDataOutput()
        // Frames must be delivered on a serial queue; a dedicated background
        // queue keeps encoding work off the main thread.
        videoOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)
        guard captureSession.canAddOutput(videoOutput) else {
            print("Unable to add the video data output")
            return
        }
        captureSession.addOutput(videoOutput)

        // startRunning() blocks until the session has started (or failed),
        // so keep it off the main thread.
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
        }
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the playback layer glued to the view's bounds.
        playerLayer.frame = view.layer.bounds
    }

    /// Starts a brand-new writer/input pair for the next chunk file, whose
    /// timeline begins at `presentationTimeStamp`. Also advances
    /// `chunkNumber` and records `chunkStartTime`/`chunkOutputURL`.
    func createWriterInput(for presentationTimeStamp: CMTime) {
        let fileManager = FileManager.default
        chunkOutputURL = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
            .appendingPathComponent("chunk\(chunkNumber).mov")
        // AVAssetWriter refuses to overwrite an existing file, so clear any
        // stale chunk left over from a previous run.
        try? fileManager.removeItem(at: chunkOutputURL)

        // try! is acceptable here: the URL is inside our own sandbox, so a
        // failure to create the writer is a programmer error.
        assetWriter = try! AVAssetWriter(outputURL: chunkOutputURL, fileType: AVFileTypeQuickTimeMovie)
        // TODO: get dimensions from CMSampleBufferGetImageBuffer(sampleBuffer)
        let outputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: 1920,
            AVVideoHeightKey: 1080,
        ]
        assetWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        // Real-time capture: the writer must never stall waiting for media.
        assetWriterInput.expectsMediaDataInRealTime = true
        assetWriter.add(assetWriterInput)

        chunkNumber += 1
        chunkStartTime = presentationTimeStamp
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: chunkStartTime)
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Called once per captured frame. Opens the first chunk on the first
    /// frame; after 30 seconds, finalises the current chunk, enqueues it for
    /// playback, and starts the next one. Every frame is appended to the
    /// currently open chunk.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        if assetWriter == nil {
            // Very first frame: open the first chunk.
            createWriterInput(for: presentationTimeStamp)
        } else {
            let chunkDuration = CMTimeGetSeconds(CMTimeSubtract(presentationTimeStamp, chunkStartTime))
            if chunkDuration > 30 {
                assetWriter.endSession(atSourceTime: presentationTimeStamp)

                // Copy into locals: finishWriting is asynchronous, and
                // createWriterInput below immediately re-assigns the
                // properties to the next chunk's writer/URL.
                let newChunkURL = chunkOutputURL!
                let chunkAssetWriter = assetWriter!
                chunkAssetWriter.finishWriting {
                    print("finishWriting says: \(chunkAssetWriter.status.rawValue, chunkAssetWriter.error)")
                    print("queuing \(newChunkURL)")
                    // finishWriting's completion runs on an arbitrary queue;
                    // the player drives an on-screen AVPlayerLayer, so hop to
                    // the main queue before touching it.
                    DispatchQueue.main.async {
                        self.player.insert(AVPlayerItem(url: newChunkURL), after: nil)
                        self.player.play()
                    }
                }
                createWriterInput(for: presentationTimeStamp)
            }
        }

        // Fix: appending while the input is not ready is a programmer error
        // that AVFoundation punishes with an exception — drop the frame
        // instead (expectsMediaDataInRealTime makes this the documented
        // real-time behavior).
        if assetWriterInput.isReadyForMoreMediaData {
            if !assetWriterInput.append(sampleBuffer) {
                print("append says NO: \(assetWriter.status.rawValue, assetWriter.error)")
            }
        }
    }
}
p.s. it's very curious to see what you were doing 30 seconds ago. What exactly are you making?