Logcat

    E/AndroidRuntime: FATAL EXCEPTION: main
     java.lang.RuntimeException: Unable to instantiate activity ComponentInfo{phone_finder.maxsoft.com.whereismyphone/PACKAGE.SpeechRecognizerResult}: java.lang.InstantiationException: can't instantiate class phone_finder.maxsoft.com.whereismyphone.SpeechRecognizerResult
         at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2034)
         at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2135)
         at android.app.ActivityThread.access$700(ActivityThread.java:140)
         at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1237)
         at android.os.Handler.dispatchMessage(Handler.java:99)
         at android.os.Looper.loop(Looper.java:137)
         at android.app.ActivityThread.main(ActivityThread.java:4921)
         at java.lang.reflect.Method.invokeNative(Native Method)
         at java.lang.reflect.Method.invoke(Method.java:511)
         at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1038)
         at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:805)
         at dalvik.system.NativeStart.main(Native Method)
      Caused by: java.lang.InstantiationException: can't instantiate class phone_finder.maxsoft.com.whereismyphone.SpeechRecognizerResult
         at java.lang.Class.newInstanceImpl(Native Method)
         at java.lang.Class.newInstance(Class.java:1319)
         at android.app.Instrumentation.newActivity(Instrumentation.java:1068)
         at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2025)
         at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2135) 
         at android.app.ActivityThread.access$700(ActivityThread.java:140) 
         at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1237) 
         at android.os.Handler.dispatchMessage(Handler.java:99) 
         at android.os.Looper.loop(Looper.java:137) 
         at android.app.ActivityThread.main(ActivityThread.java:4921) 
         at java.lang.reflect.Method.invokeNative(Native Method) 
         at java.lang.reflect.Method.invoke(Method.java:511) 
         at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1038) 
         at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:805) 
         at dalvik.system.NativeStart.main(Native Method) 

My app should recognize a word, e.g. *mobile*, and then do something. I found this simple to do with the Google SpeechRecognizer, like below:

public abstract class SpeechRecognizerResult extends Activity implements SpeechRecognizerManager.OnResultListener {
private final String TAG = getClass().getSimpleName();
private SpeechRecognizerManager mSpeechRecognizerManager;
private TextView txt_result;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.recognizer_listener);

    mSpeechRecognizerManager = new SpeechRecognizerManager(this);
    mSpeechRecognizerManager.setOnResultListner(this);

    txt_result = (TextView) findViewById(R.id.text_result);
}

@Override
public void OnResult(ArrayList<String> commands) {

    for(String command:commands) {
        if (command.equals("mobile")){
            Toast.makeText(this, "You said:" + command, Toast.LENGTH_SHORT).show();
            txt_result.setText(command);
            return;
        }

    }
}
}

and

public class SpeechRecognizerManager {
private Context mContext;
protected android.speech.SpeechRecognizer mGoogleSpeechRecognizer;
protected Intent mSpeechRecognizerIntent;
private OnResultListener mOnResultListener;


public SpeechRecognizerManager(Context context) {
    this.mContext = context;
    initGoogleSpeechRecognizer();

}


private void initGoogleSpeechRecognizer() {

    mGoogleSpeechRecognizer = android.speech.SpeechRecognizer
            .createSpeechRecognizer(mContext);

    mGoogleSpeechRecognizer.setRecognitionListener(new GoogleRecognitionListener());

    mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CONFIDENCE_SCORES, true);
}


public void destroy() {
    if (mGoogleSpeechRecognizer != null) {
        mGoogleSpeechRecognizer.cancel();
        mGoogleSpeechRecognizer.destroy();
    }
}



protected class GoogleRecognitionListener implements
        android.speech.RecognitionListener {

    private final String TAG = GoogleRecognitionListener.class
            .getSimpleName();

    @Override
    public void onBeginningOfSpeech() {
    }

    @Override
    public void onEndOfSpeech() {
    }

    @Override
    public void onReadyForSpeech(Bundle params) {
    }

    @Override
    public void onRmsChanged(float rmsdB) {
    }

    @Override
    public void onBufferReceived(byte[] buffer) {

    }

    @Override
    public void onError(int error) {
        Log.e(TAG, "onError:" + error);

    }

    @Override
    public void onPartialResults(Bundle partialResults) {
        Log.d(TAG, "onPartialResults");
    }

    @Override
    public void onResults(Bundle results) {
        if (results != null
                && results.containsKey(android.speech.SpeechRecognizer.RESULTS_RECOGNITION)) {
            ArrayList<String> heard =
                    results.getStringArrayList(android.speech.SpeechRecognizer.RESULTS_RECOGNITION);
            float[] scores =
                    results.getFloatArray(android.speech.SpeechRecognizer.CONFIDENCE_SCORES);

            for (int i = 0; i < heard.size(); i++) {
                Log.d(TAG, "onResults heard: " + heard.get(i) + " confidence: " + scores[i]);
            }

            // Send the list of recognized words to the activity.
            if (mOnResultListener != null) {
                mOnResultListener.OnResult(heard);
            }
        }
    }


    @Override
    public void onEvent(int eventType, Bundle params) {

    }

}



public void setOnResultListner(OnResultListener onResultListener) {
    mOnResultListener = onResultListener;
}

public interface OnResultListener {
    public void OnResult(ArrayList<String> commands);
}
}
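
For completeness: nothing above ever actually starts recognition, so I assume `SpeechRecognizerManager` would also need wrappers along these lines (the method names are just my guess; they simply wrap the standard `android.speech.SpeechRecognizer` calls and are not in my current code):

public void startListening() {
    if (mGoogleSpeechRecognizer != null) {
        // Starts a single recognition session; must be called from the main thread.
        mGoogleSpeechRecognizer.startListening(mSpeechRecognizerIntent);
    }
}

public void stopListening() {
    if (mGoogleSpeechRecognizer != null) {
        mGoogleSpeechRecognizer.stopListening();
    }
}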
  • What should I do to fix this?
  • How can I continue recognition (after the first match)?
  • Make your SpeechRecognizerResult class public, not abstract. – brandall Jul 07 '16 at 15:05
  • @brandall. Oh! Thanks a lot. But now nothing happens when I say *mobile*. Why? – Mina Dahesh Jul 08 '16 at 06:49
  • Where are you calling `startListening()` ? I can't see it in your code. – brandall Jul 08 '16 at 16:20
  • @brandall. I want it to work when the device is locked (while the app runs in the background), so where should I put it? – Mina Dahesh Jul 09 '16 at 11:54
  • A lock screen widget – brandall Jul 09 '16 at 13:43
  • I don't understand what you mean. Is it different from using a Service? I want to recognize speech while the device is locked and the app runs in the background. Could you give some more information? – Mina Dahesh Jul 09 '16 at 17:43
  • Do you want the user to have to press a button to start the speech, if so, you can have one on the lock screen to start your service. Or are you wanting the speech recognition to be running continuously? If so, you'll need to use PocketSphinx http://stackoverflow.com/a/4396749/1256219 – brandall Jul 09 '16 at 19:00
  • @brandall. My answer was just a mistake in the Stack app. I want to do both: with a switch, the user allows the app to do speech recognition during the lock screen. I tried PocketSphinx too, but I can't run it; I don't know where the keywords must be defined. http://stackoverflow.com/q/37225500/4568864 – Mina Dahesh Jul 10 '16 at 14:52
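
Putting the comments above together, a rough sketch of the suggested changes might look like the following. It assumes a `startListening()` wrapper on `SpeechRecognizerManager` like the one sketched in the question; the restart-after-results pattern is only an illustration, not something confirmed in the thread.

// 1) A concrete, public activity -- the framework cannot instantiate an
//    abstract Activity, which is what causes the InstantiationException
//    in the Logcat above.
public class SpeechRecognizerResult extends Activity
        implements SpeechRecognizerManager.OnResultListener {

    private SpeechRecognizerManager mSpeechRecognizerManager;
    private TextView txt_result;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.recognizer_listener);

        mSpeechRecognizerManager = new SpeechRecognizerManager(this);
        mSpeechRecognizerManager.setOnResultListner(this);
        txt_result = (TextView) findViewById(R.id.text_result);

        // 2) Recognition has to be started explicitly somewhere,
        //    e.g. here or in onResume().
        mSpeechRecognizerManager.startListening();
    }

    @Override
    public void OnResult(ArrayList<String> commands) {
        for (String command : commands) {
            if (command.equals("mobile")) {
                Toast.makeText(this, "You said: " + command, Toast.LENGTH_SHORT).show();
                txt_result.setText(command);
                break;
            }
        }
        // 3) A SpeechRecognizer session ends after delivering results, so to
        //    keep recognizing, start a new session once results arrive.
        mSpeechRecognizerManager.startListening();
    }

    @Override
    protected void onDestroy() {
        mSpeechRecognizerManager.destroy();
        super.onDestroy();
    }
}

Note that the RECORD_AUDIO permission is still required, and for continuous, hands-free recognition in the background or on the lock screen the comments point to PocketSphinx rather than restarting the Google recognizer in a loop.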

0 Answers