I am working on an app that takes a photo with the front camera while displaying the back camera's feed. There are no warnings or errors in my code, and when I run it everything works at first: the camera view loads and you can see what the back camera sees. It all works until I press the photo button. Then the button can no longer be pressed, Xcode jumps to AppDelegate.swift, highlights the line class AppDelegate: UIResponder, UIApplicationDelegate {,
and gives the error: "Thread 1: signal SIGABRT"
Here is my ViewController.swift:
import UIKit
import AVFoundation
class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    var captureSession : AVCaptureSession?
    var stillImageOutput : AVCaptureStillImageOutput?
    var stillImageOutput2 : AVCaptureStillImageOutput?
    var previewLayer : AVCaptureVideoPreviewLayer?
    var captureSession2 : AVCaptureSession?

    @IBOutlet weak var cameraView: UIView!
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer?.frame = cameraView.bounds
    }
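    // Sets up the back-camera session and preview layer each time the view is about to appear.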
    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080
        let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            captureSession?.addInput(input)
        } catch {
        }
        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput?.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
        if ((captureSession?.canAddOutput(stillImageOutput)) != nil) {
            captureSession?.addOutput(stillImageOutput)
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
            cameraView.layer.addSublayer(previewLayer!)
            captureSession?.startRunning()
        }
    }
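    // Intended to set up a second capture session, with its own still image output, for the front camera.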
    func ohterCamera() {
        captureSession2 = AVCaptureSession()
        captureSession2?.sessionPreset = AVCaptureSessionPreset1920x1080
        let frontCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        do {
            let input = try AVCaptureDeviceInput(device: frontCamera)
            captureSession?.addInput(input)
        } catch {
        }
        stillImageOutput2 = AVCaptureStillImageOutput()
        stillImageOutput2?.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
        if ((captureSession2?.canAddOutput(stillImageOutput2)) != nil) {
            captureSession2?.addOutput(stillImageOutput2)
            captureSession2?.startRunning()
        }
    }
    var savedImage: UIImage!
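    // Asynchronously captures a still image from stillImageOutput2's video connection and stores it in savedImage.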
    func didTakePhoto() {
        if let videoConection = stillImageOutput2?.connectionWithMediaType(AVMediaTypeVideo) {
            videoConection.videoOrientation = AVCaptureVideoOrientation.Portrait
            stillImageOutput2?.captureStillImageAsynchronouslyFromConnection(videoConection, completionHandler: { (sampleBuffer, ErrorType) -> Void in
                if sampleBuffer != nil {
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                    let dataProvider = CGDataProviderCreateWithCFData(imageData)
                    let cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, .RenderingIntentDefault)
                    self.savedImage = UIImage(CGImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.Right)
                }
            })
        }
    }
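    // Button action: take the photo and save it to the photo library.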
    @IBAction func takePhotoBtn(sender: UIButton) {
        didTakePhoto()
        UIImageWriteToSavedPhotosAlbum(savedImage, nil, nil, nil)
    }
}
And here is my AppDelegate.swift:
import UIKit
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    var window: UIWindow?

    func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }

    func applicationWillResignActive(application: UIApplication) {
        // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
        // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
    }

    func applicationDidEnterBackground(application: UIApplication) {
        // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
        // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
    }

    func applicationWillEnterForeground(application: UIApplication) {
        // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
    }

    func applicationDidBecomeActive(application: UIApplication) {
        // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
    }

    func applicationWillTerminate(application: UIApplication) {
        // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
    }
}
Does anyone know why I get this error, and/or how I can fix it? From what I found when I googled it, this error usually means a broken outlet or action connection, but as far as I can tell all of my connections are fine...