I’m currently building a camera app. It has a segmented control to switch between photo and video. The video stream displays for the photos ViewController but when I switch to the video ViewController the video stream does not display? I’ve been trying everything I can but haven’t figured out why the video stream is not displaying when I toggle the segmented control to video. I put a color on the PreviewLayer to make sure it’s there, I see it on the simulator and it displays on the phone, but the video stream doesn’t display.
If anyone has experience developing camera apps I’d really appreciate the help.
This is code from the project where the preview layer is referenced:
class VideoViewController: UIViewController {
var isRecording: Bool = false
private var permissionGranted = false
var videoSession = AVCaptureSession()
var captureSession = AVCaptureSession()
var movieOutput = AVCaptureMovieFileOutput()
let captureSessionQueue = DispatchQueue(label: "captureSessionQueue")
let previewLayer = AVCaptureVideoPreviewLayer()
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .blue
view.layer.addSublayer(previewLayer)
previewLayer.backgroundColor = UIColor.systemPink.cgColor
view.addSubview(startVideoButton)
view.addSubview(stopVideoButton)
stopVideoButton.isHidden = true
startVideoButton.addTarget(self, action: #selector(startRecording), for: .touchUpInside)
stopVideoButton.addTarget(self, action: #selector(stopRecording), for: .touchUpInside)
checkCameraPermissions()
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
previewLayer.frame = view.bounds
startVideoButton.center = CGPoint(x: view.frame.size.width / 2, y: view.frame.size.height - 80)
stopVideoButton.center = CGPoint(x: view.frame.size.width / 2, y: view.frame.size.height - 80)
}
private func setUpVideoCamera() {
guard permissionGranted else { return }
videoSession.beginConfiguration()
videoSession.sessionPreset = .high
let camera = AVCaptureDevice.default(for: .video)
let cameraInput: AVCaptureDeviceInput
do {
cameraInput = try AVCaptureDeviceInput(device: camera!)
} catch {
print("Error setting device video input: \(error)")
return
}
if videoSession.canAddInput(cameraInput) {
videoSession.addInput(cameraInput)
}
if videoSession.canAddOutput(movieOutput) {
videoSession.addOutput(movieOutput)
}
previewLayer.videoGravity = .resizeAspectFill
previewLayer.session = videoSession
videoSession.startRunning()
// Add audio device
do {
let audioDevice = AVCaptureDevice.default(for: .audio)
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
if videoSession.canAddInput(audioDeviceInput) {
videoSession.addInput(audioDeviceInput)
} else {
print("Could not add audio device input to the session")
}
} catch {
print("Could not create audio device input: \(error)")
}
}
Here are screenshots of my app being used: