1

Here is my code:

import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet weak var cameraView: UIView!

    // Most recently captured photo; assigned by the
    // AVCapturePhotoCaptureDelegate extension after a capture completes.
    var image: UIImage!

    var captureSession = AVCaptureSession()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var currentCamera: AVCaptureDevice?

    var photoOutput: AVCapturePhotoOutput?

    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func viewDidAppear(_ animated: Bool) {
        // Bug fix: UIKit requires subclasses to call super.viewDidAppear(_:).
        super.viewDidAppear(animated)

        setupCaptureSession()
        setupDevice()
        setupInputOutput()
        setupPreviewLayer()
        startRunningCaptureSession()
    }

    @IBAction func cameraButton_Tab(_ sender: Any) {
        // Request a still capture; the result is delivered asynchronously to
        // the AVCapturePhotoCaptureDelegate extension below.
        let settings = AVCapturePhotoSettings()

//        performSegue(withIdentifier: "showPhoto_Segue", sender: nil)
        photoOutput?.capturePhoto(with: settings, delegate: self)
    }

    /// Configures the session for high-resolution still-photo capture.
    func setupCaptureSession() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
    }

    /// Discovers the built-in wide-angle cameras and picks the back one as current.
    func setupDevice() {
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera],
            mediaType: .video,
            position: .unspecified)

        for device in deviceDiscoverySession.devices {
            if device.position == .back {
                backCamera = device
            } else if device.position == .front {
                frontCamera = device
            }
        }

        currentCamera = backCamera
    }

    /// Wires the selected camera into the session and attaches a photo output.
    func setupInputOutput() {
        // Bug fix: the original force-unwrapped `currentCamera`, crashing on
        // devices (or the simulator) with no camera. Bail out gracefully instead.
        guard let camera = currentCamera else {
            print("No capture device available")
            return
        }

        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: camera)
            // Bug fix: guard with canAddInput/canAddOutput so a repeated
            // viewDidAppear (e.g. returning from another screen) cannot crash
            // by adding the same input/output twice.
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }

            let output = AVCapturePhotoOutput()
            output.setPreparedPhotoSettingsArray(
                [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])],
                completionHandler: nil)
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
            photoOutput = output
        } catch {
            print(error)
        }
    }

    /// Inserts a preview layer sized to `cameraView` so the live feed is visible.
    func setupPreviewLayer() {
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.connection?.videoOrientation = .portrait
        previewLayer.frame = self.cameraView.bounds
        self.cameraView.layer.insertSublayer(previewLayer, at: 0)
        cameraPreviewLayer = previewLayer
    }

    /// Starts the capture session off the main thread.
    func startRunningCaptureSession() {
        // Bug fix: startRunning() is a blocking call; Apple recommends invoking
        // it on a background queue so the UI does not stall.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession.startRunning()
        }
    }

}

extension ViewController: AVCapturePhotoCaptureDelegate {

    /// Delivered when the capture pipeline finishes processing a photo.
    /// Stores the decoded image in `self.image`.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // Bug fix: the original silently ignored `error`, hiding capture
        // failures. Surface them before attempting to decode the data.
        if let error = error {
            print("Photo capture failed: \(error)")
            return
        }
        if let imageData = photo.fileDataRepresentation() {
            image = UIImage(data: imageData)
        }
    }

}

(Screenshot: the app's view, with the live camera preview visible through a yellow square area.)

See the image — I want to save only the area whose background color is yellow, through which I can see the camera preview.

But when I save the image, it seems to save the whole camera view, not just the square.

I made the UIImageView the same size as the yellow UIView and saved the output, but it captures the whole view and resizes it.

In other words, it squeezes the full rectangle down into the square.

How can I capture just the yellow-background-sized area and save it?

Polaris Nation
  • 1,085
  • 2
  • 18
  • 49

1 Answer

1

This didFinishProcessingPhoto callback will return the complete image — what the camera sensor sees. You won't get the image shown in your PreviewLayer directly. So, in order to get a UIImage matching the shown PreviewLayer, you can resize the captured image.

Resizing can be done in two ways: one keeps the aspect ratio, and the other uses an exact size you pass in. I would recommend going with the aspect-ratio version, because it ensures your image won't be squeezed or stretched in either dimension, whereas passing a wrong exact size won't fulfil your requirement.

Resize UIImage passing new CGSize:

extension UIImage {
    /// Redraws the image into a bitmap of exactly `newSize`.
    /// - Parameter newSize: The target size in points (does NOT preserve aspect ratio).
    /// - Returns: The resized image, or `nil` if no graphics context or backing
    ///   `CGImage` is available.
    func scaleImage(toSize newSize: CGSize) -> UIImage? {
        var newImage: UIImage?
        let newRect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height).integral
        UIGraphicsBeginImageContextWithOptions(newSize, false, 0)
        // Bug fix: Begin/End must always be balanced. The original only called
        // UIGraphicsEndImageContext() inside the success branch, leaking the
        // pushed context whenever the current context or cgImage was nil.
        defer { UIGraphicsEndImageContext() }
        if let context = UIGraphicsGetCurrentContext(), let cgImage = self.cgImage {
            context.interpolationQuality = .high
            // CGContext's coordinate system is flipped relative to UIKit's;
            // flip vertically so the drawn image is not upside down.
            let flipVertical = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: newSize.height)
            context.concatenate(flipVertical)
            context.draw(cgImage, in: newRect)
            if let img = context.makeImage() {
                newImage = UIImage(cgImage: img)
            }
        }
        return newImage
    }
}

Usage: capturedImage.scaleImage(toSize: CGSize(width: 300, height: 300))

Resize UIImage keeping aspect ratio:

extension UIImage {
    /// Returns a copy of the image resized to `newWidth`, preserving the
    /// original aspect ratio.
    /// - Parameter newWidth: The desired width in points.
    func scaleImage(toWidth newWidth: CGFloat) -> UIImage {
        // Derive the proportional height from the width ratio.
        let ratio = newWidth / size.width
        let targetSize = CGSize(width: newWidth, height: size.height * ratio)

        // UIGraphicsImageRenderer manages the bitmap context's lifecycle.
        let renderer = UIGraphicsImageRenderer(size: targetSize)
        return renderer.image { _ in
            self.draw(in: CGRect(origin: .zero, size: targetSize))
        }
    }
}

Usage: capturedImage.scaleImage(toWidth: 300)

Reference: Resize UIImage to 200x200pt/px

Update:

Keep the below method as it is in your code:

// Triggers a still capture with default settings; the photo is delivered
// asynchronously to the AVCapturePhotoCaptureDelegate's
// photoOutput(_:didFinishProcessingPhoto:error:) callback.
@IBAction func cameraButton_Tab(_ sender: Any) {
    let settings = AVCapturePhotoSettings()
    photoOutput?.capturePhoto(with: settings, delegate: self)
}

extension ViewController: AVCapturePhotoCaptureDelegate {

    /// Decodes the captured photo and scales it to the preview layer's width.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // Bug fix: UIImage(data:) is failable, so the original called
        // scaleImage(toWidth:) on an optional (compile error) and
        // force-unwrapped cameraPreviewLayer. Unwrap both safely instead.
        guard let imageData = photo.fileDataRepresentation(),
              let capturedImage = UIImage(data: imageData),
              let previewWidth = cameraPreviewLayer?.frame.size.width else {
            return
        }
        // It will return the image sized to the camera preview's width.
        image = capturedImage.scaleImage(toWidth: previewWidth)
    }
}
Sohil R. Memon
  • 9,404
  • 1
  • 31
  • 57
  • Thanks, but I have question of it, if I take the last method, where I call that? What is PreviewLayer? – Polaris Nation Jan 25 '19 at 06:37
  • you assigned with your photo – Hiền Đỗ Jan 25 '19 at 06:42
  • @PolarisNation `let settings = AVCapturePhotoSettings() photoOutput?.capturePhoto(with: settings, delegate: self) ` this method only use when you want to resize `UIImage` manually under `didFinishProcessingPhoto` using resize methods I mentioned! – Sohil R. Memon Jan 25 '19 at 06:50
  • @SohilR.Memon I'm sorry I didn't understand so quickly. Ok I delete of that and just call takeScreenshot and then call UIImageWriteToSavedPhotosAlbum, but it save the yellow square not preview – Polaris Nation Jan 25 '19 at 07:16
  • @PolarisNation Sorry refer this, https://stackoverflow.com/questions/44841331/ios-screenshot-live-camera-preview which says screenshot of Camera preview is not possible. You can use other methods as stated – Sohil R. Memon Jan 25 '19 at 07:23
  • @SohilR.Memon I put second method of it. but it show the error. Use of unresolved identifier 'newWidth'; did you mean 'wcwidth'? – Polaris Nation Jan 25 '19 at 07:35
  • @SohilR.Memon I appreciate of your help . I'll try it, thanks – Polaris Nation Jan 25 '19 at 08:44
  • Apple documentation link for ?: This didFinishProcessingPhoto will return the complete image like what camera is seeing. – Kapil_A Oct 01 '20 at 11:32
  • @Kapil_A Please check here: https://developer.apple.com/documentation/avfoundation/avcapturephotocapturedelegate/2873949-photooutput – Sohil R. Memon Oct 01 '20 at 12:29