2

I am having an issue with this code using Xcode 9 and Swift. It will build fully and run on my device, showing the camera output properly, but will not actually call captureOutput to process the data.

eimport UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        //Creating session
        print("Creating Session")
        let session = AVCaptureSession()

        //Define capture devcie AVCaptureDeviceInput(device: captureDevice!)
        print("Defining Capturing device")
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)

        do
        {
            print("Initialzing input")
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            session.addInput(input)
        }
        catch
        {
            print("ERROR")
        }

        print("Initializing output")
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)

        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)

        //self.view.bringSubview(toFront: square)

        session.startRunning()
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {

        print("Processing Data.")
        if metadataObjects != nil && metadataObjects.count != 0
        {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject
            {
                if object.type == AVMetadataObject.ObjectType.qr
                {
                    print("Found a pdf417.")
                    let alert = UIAlertController(title: "QR Scanner", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))

                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }


}

In the console, I am only getting output down to the print("Initializing output"). This tells me that the captureOutput function is not being called by the delegate. I have tried changing the scope of the session to no avail.

Any ideas?

Connor
  • 215
  • 1
  • 13
  • 2
    There's no `captureOutput(_:didOutputMetadataObjects:from:)`. What you should implement is [`metadataOutput(_:didOutput:from:)`](https://developer.apple.com/documentation/avfoundation/avcapturemetadataoutputobjectsdelegate/1389481-metadataoutput). – OOPer Sep 19 '17 at 21:32

0 Answers0