First of all, I know there are already a lot of questions about voice recognition in a background service. I think I've gone through all of them over the last two weeks :P, but I did not understand all of the answers, and the code from them is not working for me. What I want is this: when the user taps a button, a voice recognition service starts, and the service keeps listening for instructions from the user even when the phone is locked. Can somebody tell me how I can achieve this, or point me to a tutorial? I've been working on this for two weeks and have searched a lot on Google and on SO.
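To make the button part concrete: right now I simply call startService() in onCreate() (full code below), but the idea is to eventually hook it to a button, roughly like this (btnStart is just a placeholder id, it is not in my layout yet, and this would need android.widget.Button / android.view.View imports on top of the ones shown below):

// Hypothetical button hook - btnStart is a placeholder id, not in my real layout.
// It would just trigger the same startService() call I currently make in onCreate().
Button btnStart = (Button) findViewById(R.id.btnStart);
btnStart.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        startService(new Intent(MainActivity.this, RecognitionService.class));
    }
});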
================== Update ==================

I'm starting the service from MainActivity and binding to it. The service starts and it receives the MSG_RECOGNIZER_START_LISTENING message, but none of the RecognitionListener callbacks are ever called. I'm using the code from this Android Speech Recognition Continuous Service.
Can somebody tell me what's going wrong in my code?

This is the MainActivity:
package com.android.jarvis.voicerecognitionservice;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Build;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import static com.android.jarvis.voicerecognitionservice.BuildConfig.DEBUG;
public class MainActivity extends AppCompatActivity {

    static final String TAG = "Service";
    private int mBindFlag;
    private Messenger mServiceMessenger;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Intent service = new Intent(MainActivity, RecognitionService.class);
        startService(new Intent(this, RecognitionService.class));
        mBindFlag = Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH ? 0 : Context.BIND_ABOVE_CLIENT;
    }

    @Override
    protected void onStart()
    {
        super.onStart();
        bindService(new Intent(this, RecognitionService.class), mServiceConnection, mBindFlag);
    }

    @Override
    protected void onStop()
    {
        super.onStop();
        if (mServiceMessenger != null)
        {
            unbindService(mServiceConnection);
            mServiceMessenger = null;
        }
    }

    private final ServiceConnection mServiceConnection = new ServiceConnection()
    {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service)
        {
            if (DEBUG) {Log.d(TAG, "onServiceConnected");} //$NON-NLS-1$
            mServiceMessenger = new Messenger(service);
            Message msg = new Message();
            msg.what = RecognitionService.MSG_RECOGNIZER_START_LISTENING;
            try
            {
                mServiceMessenger.send(msg);
                Log.d(TAG, "Message Sent");
            }
            catch (RemoteException e)
            {
                e.printStackTrace();
            }
        }

        @Override
        public void onServiceDisconnected(ComponentName name)
        {
            if (DEBUG) {
                Log.d(TAG, "onServiceDisconnected");} //$NON-NLS-1$
            mServiceMessenger = null;
        }
    };
}
This is the RecognitionService:
package com.android.jarvis.voicerecognitionservice;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;
import android.widget.Toast;
import java.lang.ref.WeakReference;
import static com.android.jarvis.voicerecognitionservice.MainActivity.TAG;
public class RecognitionService extends Service {

    static AudioManager mAudioManager;
    protected SpeechRecognizer mSpeechRecognizer;
    protected Intent mSpeechRecognizerIntent;
    protected final Messenger mServerMessenger = new Messenger(new IncomingHandler(this));

    static boolean mIsListening;
    static volatile boolean mIsCountDownOn;
    static boolean mIsStreamSolo;

    static final int MSG_RECOGNIZER_START_LISTENING = 1;
    static final int MSG_RECOGNIZER_CANCEL = 2;

    @Override
    public void onCreate()
    {
        super.onCreate();
        mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
        mSpeechRecognizer.setRecognitionListener(new SpeechRecognitionListener());
        mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
                this.getPackageName());
        Toast.makeText(this, "Service Started", Toast.LENGTH_SHORT).show();
    }
    protected static class IncomingHandler extends Handler
    {
        private WeakReference<RecognitionService> mtarget;

        IncomingHandler(RecognitionService target)
        {
            mtarget = new WeakReference<RecognitionService>(target);
        }

        @Override
        public void handleMessage(Message msg)
        {
            final RecognitionService target = mtarget.get();

            switch (msg.what)
            {
                case MSG_RECOGNIZER_START_LISTENING:
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
                    {
                        // turn off beep sound
                        // if (!mIsStreamSolo)
                        // {
                        //     mAudioManager.setStreamSolo(AudioManager.STREAM_VOICE_CALL, true);
                        //     mIsStreamSolo = true;
                        // }
                    }
                    if (!target.mIsListening)
                    {
                        target.mSpeechRecognizer.startListening(target.mSpeechRecognizerIntent);
                        target.mIsListening = true;
                        Log.d(TAG, "message start listening"); //$NON-NLS-1$
                    }
                    break;

                case MSG_RECOGNIZER_CANCEL:
                    if (mIsStreamSolo)
                    {
                        mAudioManager.setStreamSolo(AudioManager.STREAM_VOICE_CALL, false);
                        mIsStreamSolo = false;
                    }
                    target.mSpeechRecognizer.cancel();
                    target.mIsListening = false;
                    Log.d(TAG, "message canceled recognizer"); //$NON-NLS-1$
                    break;
            }
        }
    }
    // Count down timer for Jelly Bean work around
    protected CountDownTimer mNoSpeechCountDown = new CountDownTimer(5000, 5000)
    {
        @Override
        public void onTick(long millisUntilFinished)
        {
            // TODO Auto-generated method stub
        }

        @Override
        public void onFinish()
        {
            mIsCountDownOn = false;
            Message message = Message.obtain(null, MSG_RECOGNIZER_CANCEL);
            try
            {
                mServerMessenger.send(message);
                message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
                mServerMessenger.send(message);
            }
            catch (RemoteException e)
            {
            }
        }
    };
    @Override
    public void onDestroy()
    {
        super.onDestroy();

        if (mIsCountDownOn)
        {
            mNoSpeechCountDown.cancel();
        }
        if (mSpeechRecognizer != null)
        {
            mSpeechRecognizer.destroy();
        }
    }

    @Override
    public IBinder onBind(Intent intent)
    {
        Log.d(TAG, "onBind"); //$NON-NLS-1$
        return mServerMessenger.getBinder();
    }
    protected class SpeechRecognitionListener implements RecognitionListener
    {
        @Override
        public void onBeginningOfSpeech()
        {
            // speech input will be processed, so there is no need for count down anymore
            if (mIsCountDownOn)
            {
                mIsCountDownOn = false;
                mNoSpeechCountDown.cancel();
            }
            Log.d(TAG, "onBeginingOfSpeech"); //$NON-NLS-1$
        }

        @Override
        public void onBufferReceived(byte[] buffer)
        {
        }

        @Override
        public void onEndOfSpeech()
        {
            Log.d(TAG, "onEndOfSpeech"); //$NON-NLS-1$
        }

        @Override
        public void onError(int error)
        {
            if (mIsCountDownOn)
            {
                mIsCountDownOn = false;
                mNoSpeechCountDown.cancel();
            }
            mIsListening = false;
            Message message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
            try
            {
                mServerMessenger.send(message);
            }
            catch (RemoteException e)
            {
            }
            Log.d(TAG, "error = " + error); //$NON-NLS-1$
        }

        @Override
        public void onEvent(int eventType, Bundle params)
        {
        }

        @Override
        public void onPartialResults(Bundle partialResults)
        {
        }

        @Override
        public void onReadyForSpeech(Bundle params)
        {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
            {
                mIsCountDownOn = true;
                mNoSpeechCountDown.start();
            }
            Log.d(TAG, "onReadyForSpeech"); //$NON-NLS-1$
        }

        @Override
        public void onResults(Bundle results)
        {
            Log.d(TAG, "onResults"); //$NON-NLS-1$
        }

        @Override
        public void onRmsChanged(float rmsdB)
        {
        }
    }
}
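In case it's relevant (I honestly don't know if it is): I'm not sure whether the availability of a recognition service on the device, or the RECORD_AUDIO runtime grant on Marshmallow and above, plays a role here. This is only a sketch of how I understand those checks would look (it would go in MainActivity and needs android.Manifest and android.content.pm.PackageManager imports), not code from my project that I have verified fixes anything:

// Sketch only - two checks I understand can stop RecognitionListener callbacks
// from ever firing. Run from the Activity.

// 1) Is any recognition service installed on the device at all?
boolean available = SpeechRecognizer.isRecognitionAvailable(this);
Log.d(TAG, "recognition available = " + available);

// 2) On API 23+ the RECORD_AUDIO permission must also be granted at runtime,
//    not just declared in the manifest.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
        && checkSelfPermission(Manifest.permission.RECORD_AUDIO)
                != PackageManager.PERMISSION_GRANTED) {
    requestPermissions(new String[]{Manifest.permission.RECORD_AUDIO}, 1);
}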