Here is my code:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet weak var cameraView: UIView!

    var image: UIImage!
    var captureSession = AVCaptureSession()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var currentCamera: AVCaptureDevice?
    var photoOutput: AVCapturePhotoOutput?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setupCaptureSession()
        setupDevice()
        setupInputOutput()
        setupPreviewLayer()
        startRunningCaptureSession()
    }
    @IBAction func cameraButton_Tab(_ sender: Any) {
        let settings = AVCapturePhotoSettings()
        // performSegue(withIdentifier: "showPhoto_Segue", sender: nil)
        photoOutput?.capturePhoto(with: settings, delegate: self)
    }

    func setupCaptureSession() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
    }
    func setupDevice() {
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera],
            mediaType: .video,
            position: .unspecified)
        for device in deviceDiscoverySession.devices {
            if device.position == .back {
                backCamera = device
            } else if device.position == .front {
                frontCamera = device
            }
        }
        currentCamera = backCamera
    }
    func setupInputOutput() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
            captureSession.addInput(captureDeviceInput)
            photoOutput = AVCapturePhotoOutput()
            photoOutput?.setPreparedPhotoSettingsArray(
                [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])],
                completionHandler: nil)
            captureSession.addOutput(photoOutput!)
        } catch {
            print(error)
        }
    }
    func setupPreviewLayer() {
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        cameraPreviewLayer?.videoGravity = .resizeAspectFill
        cameraPreviewLayer?.connection?.videoOrientation = .portrait
        cameraPreviewLayer!.frame = self.cameraView.bounds
        self.cameraView.layer.insertSublayer(cameraPreviewLayer!, at: 0)
    }

    func startRunningCaptureSession() {
        captureSession.startRunning()
    }
}
extension ViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let imageData = photo.fileDataRepresentation() {
            image = UIImage(data: imageData)
        }
    }
}
As you can see in the image, the camera preview is shown inside a yellow square UIView, and I want to save only the part of the photo that is visible through that yellow square.
However, when I save the photo, it seems to save the whole camera frame, not just the square.
If I make a UIImageView the same size as the yellow UIView and display the captured output in it, the whole capture is simply resized to fit it, so the rectangular photo gets squeezed into a square.
How can I capture and save only the area covered by the yellow background?
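For reference, this is the kind of crop I think I need to apply to the captured photo. I am not sure whether metadataOutputRectConverted(fromLayerRect:) is the right way to map the yellow view's visible area onto the full photo, and the helper name cropToPreview is just something I made up for illustration:

    // Sketch only: crop the captured photo to what the preview layer shows.
    // Assumes cameraPreviewLayer fills cameraView and uses resizeAspectFill.
    func cropToPreview(_ fullImage: UIImage) -> UIImage? {
        guard let previewLayer = cameraPreviewLayer,
              let cgImage = fullImage.cgImage else { return nil }

        // Normalized (0...1) rect of the visible part of the frame,
        // expressed in the capture output's coordinate space.
        let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)

        // Scale the normalized rect up to pixel coordinates of the captured image.
        let width = CGFloat(cgImage.width)
        let height = CGFloat(cgImage.height)
        let cropRect = CGRect(x: outputRect.origin.x * width,
                              y: outputRect.origin.y * height,
                              width: outputRect.size.width * width,
                              height: outputRect.size.height * height)

        guard let croppedCGImage = cgImage.cropping(to: cropRect) else { return nil }
        return UIImage(cgImage: croppedCGImage,
                       scale: fullImage.scale,
                       orientation: fullImage.imageOrientation)
    }

The idea would be to call this from photoOutput(_:didFinishProcessingPhoto:error:) on the UIImage I already create there. Is this the right direction, or is there a better way to save only what is visible through the yellow square?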