I'm currently using the following code to allow users to take photos:

private func configurePhotoView() {
    capturedPhoto.contentMode = .ScaleAspectFill
    capturedPhoto.clipsToBounds = true
    capturedPhoto.hidden = true

    // Set up a capture session configured for high-quality still photo output.
    captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPresetPhoto

    // Use the default video capture device (the back camera on most devices).
    photoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

    var error: NSError?
    photoDeviceInput = AVCaptureDeviceInput(device: photoCaptureDevice, error: &error)

    if error == nil && captureSession!.canAddInput(photoDeviceInput) {
        captureSession!.addInput(photoDeviceInput)

        // Output still images as JPEG.
        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput!.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession!.canAddOutput(stillImageOutput) {
            captureSession!.addOutput(stillImageOutput)

            // Live camera preview, locked to portrait orientation.
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.Portrait
            previewLayer!.frame = cameraView.bounds // the layer needs a frame to be visible
            cameraView.layer.addSublayer(previewLayer)

            captureSession!.startRunning()

            self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: "focusPhoto:"))
        }
    }
    else {
        //TODO: Handle error
    }

    photoOverlay.backgroundColor = UIColor(white: 0, alpha: 0.5)
}

When the user presses a button, this function is called:

@IBAction func didPressTakePhoto(sender: UIButton) {
    if let videoConnection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo) {
        videoConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
        stillImageOutput!.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in
            if sampleBuffer != nil {
                // Convert the JPEG sample buffer into a UIImage, rotated to match portrait capture.
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                let dataProvider = CGDataProviderCreateWithCFData(imageData)
                let cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)

                self.capturedImage = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
                self.capturedPhoto.image = self.capturedImage
            }
        })
    }
}

This code lets me take photos with whatever aspect ratio I desire. Is there a way to modify the didPressTakePhoto code so that it records a video instead?

I can't find even one Swift tutorial on how to build a custom video recorder.

Max Hudson

1 Answer


Your AVCaptureSession currently has only one output, an AVCaptureStillImageOutput. If you want to capture video, you need to add either an AVCaptureMovieFileOutput (which records directly to a movie file) or an AVCaptureVideoDataOutput (which hands you raw video frames to process) as a second output on your capture session.

See the "Still and Video Media Capture" chapter of the AV Foundation Programming Guide for details.
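
For example, here is a minimal sketch using AVCaptureMovieFileOutput, written in the same pre-iOS 10 API style as the code in your question. The movieFileOutput property, the didPressRecordVideo action, and the output file name are hypothetical names introduced for illustration, not part of your existing code; your view controller would also need to declare conformance to AVCaptureFileOutputRecordingDelegate:

// A minimal sketch, assuming the same captureSession as in the question
// and a new (hypothetical) property: var movieFileOutput: AVCaptureMovieFileOutput?

// 1. Add a movie file output to the session, e.g. in configurePhotoView():
movieFileOutput = AVCaptureMovieFileOutput()
if captureSession!.canAddOutput(movieFileOutput) {
    captureSession!.addOutput(movieFileOutput)
}

// 2. Toggle recording from a button press:
@IBAction func didPressRecordVideo(sender: UIButton) {
    if movieFileOutput!.recording {
        movieFileOutput!.stopRecording()
    } else {
        // Record to a temporary file; move it somewhere permanent in the delegate callback.
        let outputPath = (NSTemporaryDirectory() as NSString).stringByAppendingPathComponent("movie.mov")
        movieFileOutput!.startRecordingToOutputFileURL(NSURL(fileURLWithPath: outputPath), recordingDelegate: self)
    }
}

// 3. Implement AVCaptureFileOutputRecordingDelegate to be told when the file has been written:
func captureOutput(captureOutput: AVCaptureFileOutput!,
    didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!,
    fromConnections connections: [AnyObject]!,
    error: NSError!) {
    if error == nil {
        // The finished movie is at outputFileURL; save it to the photo library or play it back here.
    }
}

AVCaptureMovieFileOutput does the encoding and file writing for you; reach for AVCaptureVideoDataOutput instead only if you need per-frame access, e.g. for filters or custom compression.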

leekaiinthesky
Hi @leekaiinthesky, do you happen to know how to fix blurriness at the start and end of a video? Question: http://stackoverflow.com/questions/34912050/avoiding-blurriness-at-start-end-of-video-even-after-using-setpreferredvideos. Thanks for your time! – Crashalot Jan 21 '16 at 11:13