
My Swift code below uses AVFoundation to take a photo and place it in the image view, all in one class. I want to use this code as a base: when the user calls func startVideo it should start recording, and when the user calls func saveVideo it should stop the recording and save the result to the image view.

[screenshot of the errors referenced in the comments below]

import UIKit
import AVFoundation

class ViewController: UIViewController {

var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?

var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
var image: UIImage?

override func viewDidLoad() {
    super.viewDidLoad()

    setupCaptureSession()
    setupDevice()
    setupInputOutput()
    setupPreviewLayer()
    startRunningCaptureSession()

    // Do any additional setup after loading the view, typically from a nib.
}

func setupCaptureSession() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
}

func setupDevice() {
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)

    let devices = deviceDiscoverySession.devices

    for device in devices {
        if device.position == AVCaptureDevice.Position.back {
            backCamera = device
        } else if device.position == AVCaptureDevice.Position.front {
            frontCamera = device
        }
    }

    currentCamera = backCamera
}

func setupInputOutput() {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        captureSession.addInput(captureDeviceInput)
        photoOutput = AVCapturePhotoOutput()
        photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
        captureSession.addOutput(photoOutput!)
    } catch {
        print(error)
    }
}

func setupPreviewLayer() {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
    cameraPreviewLayer?.frame = self.view.frame
    self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession() {
    captureSession.startRunning()
}

@IBAction func startVideo(_ sender: Any) {

}

@IBOutlet weak var photo: UIImageView!

@IBAction func saveVideo(_ sender: Any) {

}
}

extension ViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // contains the photo variable
        if let imageData = photo.fileDataRepresentation() {
            print(imageData)
            image = UIImage(data: imageData)
        }
    }
}
  • In func cameraButton I want to start recording a video when it is called. Then, when SaveButton is called, I want the video to stop recording and then be saved into the photo gallery on the iPhone. –  Jan 24 '20 at 04:46
  • I don't know how to 1) take the video and 2) save it in the gallery. I changed my func's name. –  Jan 24 '20 at 15:50
  • It does do some of the tasks: it shows a live camera view on a view controller. That is important for what I am asking for. –  Jan 25 '20 at 05:04

1 Answer


You can record your video to a file by creating an AVCaptureMovieFileOutput, adding it to your capture session, and making your ViewController conform to AVCaptureFileOutputRecordingDelegate.

This example records 5 seconds of video to a file called "output.mov" in the app's Documents directory. (The listing below uses Swift 2-era syntax; a rough modern-Swift sketch follows it.)

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front{


                do{

                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)

                    if captureSession.canAddInput(input){

                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput){

                            captureSession.addOutput(sessionOutput)

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame


                        }

                        captureSession.addOutput(movieOutput)

                        captureSession.startRunning()

                        let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
                        let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
                        try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
                        movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

                        let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
                        dispatch_after(delayTime, dispatch_get_main_queue()) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }

                }
                catch{

                    print("Error")
                }

            }
        }

    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }

}
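On a current Swift and iOS SDK, a minimal sketch of the same AVCaptureMovieFileOutput approach, wired to the startVideo/saveVideo actions from the question, might look like the following. Treat it as a sketch, not a drop-in replacement: the class name VideoViewController, the back-camera choice, and the property names are assumptions.

import UIKit
import AVFoundation

class VideoViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()

        captureSession.sessionPreset = .high

        // Back wide-angle camera; camera access needs NSCameraUsageDescription in Info.plist.
        guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
              let input = try? AVCaptureDeviceInput(device: camera),
              captureSession.canAddInput(input) else { return }
        captureSession.addInput(input)

        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }

        let layer = AVCaptureVideoPreviewLayer(session: captureSession)
        layer.videoGravity = .resizeAspectFill
        layer.frame = view.bounds
        view.layer.insertSublayer(layer, at: 0)
        previewLayer = layer

        captureSession.startRunning()
    }

    @IBAction func startVideo(_ sender: Any) {
        // Record to output.mov in the Documents directory, replacing any earlier file.
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let fileURL = documentsURL.appendingPathComponent("output.mov")
        try? FileManager.default.removeItem(at: fileURL)
        movieOutput.startRecording(to: fileURL, recordingDelegate: self)
    }

    @IBAction func saveVideo(_ sender: Any) {
        // Stop recording; the delegate callback below saves the finished file.
        movieOutput.stopRecording()
    }

    func fileOutput(_ output: AVCaptureFileOutput,
                    didFinishRecordingTo outputFileURL: URL,
                    from connections: [AVCaptureConnection],
                    error: Error?) {
        if error == nil {
            // Saving to the photo library may also need NSPhotoLibraryAddUsageDescription in Info.plist.
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
        }
    }
}

UISaveVideoAtPathToSavedPhotosAlbum is the same call the answer above uses; the main differences are the renamed delegate callback (fileOutput(_:didFinishRecordingTo:from:error:)) and the URL-based startRecording(to:recordingDelegate:).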
cinemassacres
  • What Swift version is this: 3, 4.2, or 5? –  Jan 31 '20 at 01:34
  • @YaoSmith this is in Swift 2, but you could download the Documents directory from the device in Xcode > Devices > iPhone > app name > Download Container. You can save the video to the camera roll by calling UISaveVideoAtPathToSavedPhotosAlbum() from the didFinishRecordingToOutputFileAtURL delegate method. – cinemassacres Jan 31 '20 at 01:46
  • I was able to convert most of the code so it no longer has errors. However, I have added a pic above with the remaining errors. If you can just tell me what to write so I don't get errors, I will award you the bounty. –  Jan 31 '20 at 02:07
  • @YaoSmith There is an `appendingPathComponent` method on type `URL`; fix it by making sure you have a variable of type URL: `let documentURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!; let fileURL = documentURL.appendingPathComponent("tempImage.jpg")`. The second error: you are calling the function with a string parameter, but the function does not take a string parameter (a current-Swift version of these lines is sketched after these comments). – cinemassacres Jan 31 '20 at 02:15
  • What do I put for the second error? dispatch_time_t(DispatchTime.now()), Int64(5 * Double(NSEC_PER_SEC))). Dispatch_Time is causing the error. –  Jan 31 '20 at 02:21
  • dispatch_time_t is a UInt64. The dispatch_queue_t is actually type aliased to an NSObject, but you should just use your familiar GCD methods to get queues. The block is a Swift closure. Specifically, dispatch_block_t is defined as () -> Void, which is equivalent to () -> (). – cinemassacres Jan 31 '20 at 02:24
  • So you're stating that I have to put func setupCaptureSession from my original question into your answer. –  Jan 31 '20 at 02:44
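For reference, the two lines discussed in these comments translate to current Swift roughly as below. The file name, the 5-second auto-stop, and the movieOutput property are taken from the answer above; these lines would sit where the answer's startRecordingToOutputFileURL and dispatch_after calls are now.

// URL/FileManager replace NSURL/NSFileManager when building the output path.
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let fileURL = documentsURL.appendingPathComponent("output.mov")
try? FileManager.default.removeItem(at: fileURL)
movieOutput.startRecording(to: fileURL, recordingDelegate: self)

// DispatchQueue.main.asyncAfter replaces dispatch_after for the 5-second auto-stop.
DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
    print("stopping")
    self.movieOutput.stopRecording()
}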