
I am attempting to use Apple's SFSpeechRecognizer (Xcode 9, iOS 11, Swift 4). The following code uses a timer to start a new speech recognition request every few seconds. I have the background capability turned on, and both microphone usage and speech recognition permissions granted. While the phone is unlocked, everything works as expected, but when the phone is locked I receive the following error:

2018-02-20 22:24:47.522562-0500 Speech-Recognition-Demo[3505:1234188] [Utility] +[AFAggregator logDictationFailedWithError:] Error Domain=kAFAssistantErrorDomain Code=1700 "(null)"

According to this link, speech recognition doesn't seem to work while the app is in the background, but that information is old. I'm hoping someone has since solved this or found a workaround.
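One way to see whether the recognizer really drops out when the phone locks would be the SFSpeechRecognizerDelegate availability callback. The controller below declares the conformance but never assigns the delegate, so take this as a sketch of what I have in mind rather than code I'm already running:

    // Assumes speechRecognizer?.delegate = self has been set, e.g. in viewDidLoad.
    // SFSpeechRecognizerDelegate: called when recognition becomes available/unavailable.
    func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
        print("Speech recognition available: \(available)")
    }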

Has anyone solved this problem or can anyone suggest something to try? My alternative is to require the Apple Watch for my app and I'd REALLY like to avoid that...
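For reference, by "background capability on" I mean the audio entry under UIBackgroundModes plus the usual AVAudioSession setup, roughly like the sketch below. The session configuration isn't shown in the view controller that follows, and the category/mode choices here are assumptions on my part (they follow Apple's SpeakToMe sample), not an exact copy of my project:

    import AVFoundation

    // Rough sketch of the session setup I mean; the category and mode are assumptions.
    // Info.plist also contains "audio" under UIBackgroundModes, plus
    // NSMicrophoneUsageDescription and NSSpeechRecognitionUsageDescription.
    func configureAudioSession() throws {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(AVAudioSessionCategoryRecord)
        try session.setMode(AVAudioSessionModeMeasurement)
        try session.setActive(true, with: .notifyOthersOnDeactivation)
    }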

import UIKit
import Speech

class SpeechDetectionViewController: UIViewController, SFSpeechRecognizerDelegate {

    @IBOutlet weak var detectedTextLabel: UILabel!
    @IBOutlet weak var colorView: UIView!
    @IBOutlet weak var startButton: UIButton!

    let audioEngine = AVAudioEngine()
    let speechRecognizer: SFSpeechRecognizer? = SFSpeechRecognizer()
    var request: SFSpeechAudioBufferRecognitionRequest?
    var recognitionTask: SFSpeechRecognitionTask?
    var isRecording = false

    // timers
    var timer = Timer()
    let timerInterval = 5.0
    var secondsElapsed = 0

    // toggle for taking commands
    var takeCommands = true

    override func viewDidLoad() {
        super.viewDidLoad()
        self.requestSpeechAuthorization()
        timer = Timer.scheduledTimer(timeInterval: timerInterval, target: self, selector: #selector(timerAction(timer:)), userInfo: nil, repeats: true)
    }

    @objc func timerAction(timer: Timer) {
        /* if takeCommands {
            startRecording()
        } else {
            stopRecording()
        }
        takeCommands = !takeCommands
        print("takeCommands: \(takeCommands)")
        */
        startRecording()
    }

    // MARK: - IBActions and Cancel

    @IBAction func startButtonTapped(_ sender: UIButton) {
        startRecording()
    }

    func startRecording() {
        if isRecording {
            print("STOP talking.")
            request?.endAudio() // Added line to mark end of recording
            request = nil
            //audioEngine.stop()

            // inputNode is non-optional in the iOS 11 SDK, so no optional binding is needed
            audioEngine.inputNode.removeTap(onBus: 0)
            recognitionTask?.cancel()

            isRecording = false
            startButton.backgroundColor = UIColor.gray

        } else {
            print("START talking.")
            self.recordAndRecognizeSpeech()
            isRecording = true
            startButton.backgroundColor = UIColor.red
        }
    }

    func stopRecording() {
        //audioEngine.stop()
        audioEngine.inputNode.removeTap(onBus: 0)
        recognitionTask?.cancel()
        isRecording = false
        startButton.backgroundColor = UIColor.gray
    }

    // MARK: - Recognize Speech

    func recordAndRecognizeSpeech() {
        request = SFSpeechAudioBufferRecognitionRequest()
        let node = audioEngine.inputNode // non-optional in the iOS 11 SDK
        let recordingFormat = node.outputFormat(forBus: 0)
        node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
            self.request?.append(buffer)
        }
        if !audioEngine.isRunning {
            audioEngine.prepare()
            do {
                try audioEngine.start()
            } catch {
                self.sendAlert(message: "There has been an audio engine error.")
                return print(error)
            }
        }

        guard let myRecognizer = SFSpeechRecognizer() else {
            self.sendAlert(message: "Speech recognition is not supported for your current locale.")
            return
        }
        if !myRecognizer.isAvailable {
            // Recognizer is not available right now
            self.sendAlert(message: "Speech recognition is not currently available. Check back at a later time.")
            return
        }

        recognitionTask = speechRecognizer?.recognitionTask(with: request!, resultHandler: { result, error in
            if let result = result { // result is nil when no speech was found
                let bestString = result.bestTranscription.formattedString
                self.detectedTextLabel.text = bestString

                // Keep only the text of the most recent segment
                var lastString: String = ""
                for segment in result.bestTranscription.segments {
                    let indexTo = bestString.index(bestString.startIndex, offsetBy: segment.substringRange.location)
                    lastString = String(bestString[indexTo...]) // substring(from:) is deprecated in Swift 4
                }
                self.checkForColorsSaid(resultString: lastString)

            } else if let error = error {
                self.sendAlert(message: "There has been a speech recognition error.")
                print(error)
            }
        })
    }

    // MARK: - Check Authorization Status

    func requestSpeechAuthorization() {
        SFSpeechRecognizer.requestAuthorization { authStatus in
            OperationQueue.main.addOperation {
                switch authStatus {
                case .authorized:
                    self.startButton.isEnabled = true
                case .denied:
                    self.startButton.isEnabled = false
                    self.detectedTextLabel.text = "User denied access to speech recognition"
                case .restricted:
                    self.startButton.isEnabled = false
                    self.detectedTextLabel.text = "Speech recognition restricted on this device"
                case .notDetermined:
                    self.startButton.isEnabled = false
                    self.detectedTextLabel.text = "Speech recognition not yet authorized"
                }
            }
        }
    }

    // MARK: - UI / Set view color.

    func checkForColorsSaid(resultString: String) {
        switch resultString {
        case "red":
            colorView.backgroundColor = UIColor.red
        case "orange":
            colorView.backgroundColor = UIColor.orange
        case "yellow":
            colorView.backgroundColor = UIColor.yellow
        case "green":
            colorView.backgroundColor = UIColor.green
        case "blue":
            colorView.backgroundColor = UIColor.blue
        case "purple":
            colorView.backgroundColor = UIColor.purple
        case "black":
            colorView.backgroundColor = UIColor.black
        case "white":
            colorView.backgroundColor = UIColor.white
        case "gray":
            colorView.backgroundColor = UIColor.gray
        default: break
        }
    }

    // MARK: - Alert

    func sendAlert(message: String) {
        let alert = UIAlertController(title: "Speech Recognizer Error", message: message, preferredStyle: UIAlertControllerStyle.alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.default, handler: nil))
        self.present(alert, animated: true, completion: nil)
    }
}
