
I want to create a custom camera that records square videos. However, none of the capture session presets are in a square format.

For example:

  • 1920x1080

  • 1280x720

  • 640x480

Is there any way I can create a custom preset? I still want high-quality videos, but in a square format. The closest preset to square is 640x480, but I still want HD quality.

Is this possible, and if so, how? Thank you.

Michaelcode
  • You should take a look at https://stackoverflow.com/questions/36743842/record-square-video-using-avfoundation-and-add-watermark – Ketan Parmar Jul 05 '17 at 05:02
  • Where do I input the settings? I can't tell from the answer. Can you give a detailed explanation? – Michaelcode Jul 05 '17 at 05:13

1 Answer


Import the necessary frameworks:

import UIKit
import AVFoundation
import Photos

Then define your class with the needed delegates:

class CameraViewController: UIViewController,
    AVCaptureAudioDataOutputSampleBufferDelegate,
    AVCaptureVideoDataOutputSampleBufferDelegate { }

You will need the following properties to capture and save the audio and video streams:

private var session: AVCaptureSession = AVCaptureSession()
private var deviceInput: AVCaptureDeviceInput?
private var previewLayer: AVCaptureVideoPreviewLayer?
private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()

private var videoDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)!
private var audioConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?

private var assetWriter: AVAssetWriter?
private var audioInput: AVAssetWriterInput?
private var videoInput: AVAssetWriterInput?

private var fileManager: FileManager = FileManager()
private var recordingURL: URL?

private var isCameraRecording: Bool = false
private var isRecordingSessionStarted: Bool = false

private var recordingQueue = DispatchQueue(label: "recording.queue")

Now you can start by configuring your AVCaptureSession and AVAssetWriter:

self.session.sessionPreset = AVCaptureSessionPresetHigh     
self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.mov")
if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
    _ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
}
self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
    fileType: AVFileTypeQuickTimeMovie)

Describe the audio and video settings. For this example, we will make a square whose side equals the full screen width:

let audioSettings = [
    AVFormatIDKey : kAudioFormatAppleIMA4,
    AVNumberOfChannelsKey : 1,
    AVSampleRateKey : 16000.0
] as [String : Any]

let videoSettings = [
    AVVideoCodecKey : AVVideoCodecH264,
    AVVideoWidthKey : UIScreen.main.bounds.width,
    AVVideoHeightKey : UIScreen.main.bounds.width
] as [String : Any]

Add these settings to the AVAssetWriterInput instances:

self.videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
     outputSettings: videoSettings)
self.audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio,
     outputSettings: audioSettings)

self.videoInput?.expectsMediaDataInRealTime = true
self.audioInput?.expectsMediaDataInRealTime = true

if self.assetWriter!.canAdd(self.videoInput!) {
    self.assetWriter?.add(self.videoInput!)
}

if self.assetWriter!.canAdd(self.audioInput!) {
    self.assetWriter?.add(self.audioInput!)
}

Add an AVCaptureDeviceInput for the camera to your session:

self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
if let deviceInput = self.deviceInput, self.session.canAddInput(deviceInput) {
    self.session.addInput(deviceInput)
}

Now you can configure the AVCaptureVideoPreviewLayer to be a square:

self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)

// important line of code that does the trick
self.previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill

let rootLayer = self.view.layer
rootLayer.masksToBounds = true
self.previewLayer?.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.width)

rootLayer.insertSublayer(self.previewLayer!, at: 0)

Start the session:

self.session.startRunning()

Add the video and audio outputs to the session:

DispatchQueue.main.async {
    self.session.beginConfiguration()

    if self.session.canAddOutput(self.videoOutput) {
        self.session.addOutput(self.videoOutput)
    }

    self.videoConnection = self.videoOutput.connection(withMediaType: AVMediaTypeVideo)
    if self.videoConnection?.isVideoStabilizationSupported == true {
        self.videoConnection?.preferredVideoStabilizationMode = .auto
    }
    self.session.commitConfiguration()

    if let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio),
        let audioIn = try? AVCaptureDeviceInput(device: audioDevice),
        self.session.canAddInput(audioIn) {
        self.session.addInput(audioIn)
    }

    if self.session.canAddOutput(self.audioOutput) {
        self.session.addOutput(self.audioOutput)
    }

    self.audioConnection = self.audioOutput.connection(withMediaType: AVMediaTypeAudio)
}


From this point your camera is properly configured. You now need to implement the following delegate method, which will write the video and audio samples:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer
    sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    if !self.isRecordingSessionStarted {
        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        self.assetWriter?.startSession(atSourceTime: presentationTime)
        self.isRecordingSessionStarted = true
    }

    let description = CMSampleBufferGetFormatDescription(sampleBuffer)!

    if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
        if self.audioInput!.isReadyForMoreMediaData {
            print("appendSampleBuffer audio")
            self.audioInput?.append(sampleBuffer)
        }
    } else {
        if self.videoInput!.isReadyForMoreMediaData {
            print("appendSampleBuffer video")
            if !self.videoInput!.append(sampleBuffer) {
                print("Error writing video buffer")
            }
        }
    }
}

Now add the methods that will start and stop the recording:

private func startRecording() {
    if self.assetWriter?.startWriting() != true {
        print("error: \(self.assetWriter?.error.debugDescription ?? "")")
    }

    self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
    self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
}

private func stopRecording() {
    self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
    self.audioOutput.setSampleBufferDelegate(nil, queue: nil)

    self.assetWriter?.finishWriting {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.recordingURL!)
        }) { saved, error in
            if saved {
                // Present UI on the main thread; this completion handler is
                // not guaranteed to be called there.
                DispatchQueue.main.async {
                    let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                    let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                    alertController.addAction(defaultAction)
                    self.present(alertController, animated: true, completion: nil)
                }
            } else {
                print(error.debugDescription)
            }
        }
        print("saved")
    }
}

And now you can start and stop the camera recording from an IBAction function:

@IBAction func recordingButton(_ sender: Any) {
    if self.isCameraRecording {
        self.stopRecording()
    } else {
        self.startRecording()
    }
    self.isCameraRecording = !self.isCameraRecording
}

Here is a GitHub example: https://github.com/ChernyshenkoTaras/CustomCamera

Taras Chernyshenko