Question
I'm trying to create a service that runs continuous speech recognition on Android 4.2. Using the answer from this link (Android Speech Recognition as a service on Android 4.1 & 4.2), I created a service that is started from an Activity. My problem is that I get NullPointerExceptions when accessing target.mAudioManager or target.mSpeechRecognizerIntent in the handleMessage method. The target itself (obtained from the mtarget WeakReference) is not null, but every field inside it is.
What am I doing wrong here?
Relevant Activity code (these are static methods called from the Activity; activityContext is the Activity they are called from):
public static void init(Context context)
{
    voiceCommandService = new VoiceCommandService();
    activityContext = context;
}

public static void startContinuousListening()
{
    Intent service = new Intent(activityContext, VoiceCommandService.class);
    activityContext.startService(service);

    Message msg = new Message();
    msg.what = VoiceCommandService.MSG_RECOGNIZER_START_LISTENING;
    try
    {
        voiceCommandService.mServerMessenger.send(msg);
    }
    catch (RemoteException e)
    {
        e.printStackTrace();
    }
}
Service Code:
public class VoiceCommandService extends Service
{
    protected AudioManager mAudioManager;
    protected SpeechRecognizer mSpeechRecognizer;
    protected Intent mSpeechRecognizerIntent;
    protected final Messenger mServerMessenger = new Messenger(new IncomingHandler(this));

    protected boolean mIsListening;
    protected volatile boolean mIsCountDownOn;

    static final int MSG_RECOGNIZER_START_LISTENING = 1;
    static final int MSG_RECOGNIZER_CANCEL = 2;

    @Override
    public void onCreate()
    {
        super.onCreate();
        mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);

        mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
        mSpeechRecognizer.setRecognitionListener(new SpeechRecognitionListener());
        mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                                         RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
                                         this.getPackageName());
    }

    protected static class IncomingHandler extends Handler
    {
        private WeakReference<VoiceCommandService> mtarget;

        IncomingHandler(VoiceCommandService target)
        {
            mtarget = new WeakReference<VoiceCommandService>(target);
        }

        @Override
        public void handleMessage(Message msg)
        {
            final VoiceCommandService target = mtarget.get();

            switch (msg.what)
            {
                case MSG_RECOGNIZER_START_LISTENING:
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
                    {
                        // turn off beep sound
                        target.mAudioManager.setStreamMute(AudioManager.STREAM_SYSTEM, true);
                    }
                    if (!target.mIsListening)
                    {
                        target.mSpeechRecognizer.startListening(target.mSpeechRecognizerIntent);
                        target.mIsListening = true;
                        //Log.d(TAG, "message start listening"); //$NON-NLS-1$
                    }
                    break;

                case MSG_RECOGNIZER_CANCEL:
                    target.mSpeechRecognizer.cancel();
                    target.mIsListening = false;
                    //Log.d(TAG, "message canceled recognizer"); //$NON-NLS-1$
                    break;
            }
        }
    }

    // Count down timer for Jelly Bean work around
    protected CountDownTimer mNoSpeechCountDown = new CountDownTimer(5000, 5000)
    {
        @Override
        public void onTick(long millisUntilFinished)
        {
            // TODO Auto-generated method stub
        }

        @Override
        public void onFinish()
        {
            mIsCountDownOn = false;
            Message message = Message.obtain(null, MSG_RECOGNIZER_CANCEL);
            try
            {
                mServerMessenger.send(message);
                message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
                mServerMessenger.send(message);
            }
            catch (RemoteException e)
            {
            }
        }
    };

    @Override
    public void onDestroy()
    {
        super.onDestroy();

        if (mIsCountDownOn)
        {
            mNoSpeechCountDown.cancel();
        }
        if (mSpeechRecognizer != null)
        {
            mSpeechRecognizer.destroy();
        }
    }

    protected class SpeechRecognitionListener implements RecognitionListener
    {
        private static final String TAG = "SpeechRecognitionListener";

        @Override
        public void onBeginningOfSpeech()
        {
            // speech input will be processed, so there is no need for count down anymore
            if (mIsCountDownOn)
            {
                mIsCountDownOn = false;
                mNoSpeechCountDown.cancel();
            }
            //Log.d(TAG, "onBeginingOfSpeech"); //$NON-NLS-1$
        }

        @Override
        public void onBufferReceived(byte[] buffer)
        {
        }

        @Override
        public void onEndOfSpeech()
        {
            //Log.d(TAG, "onEndOfSpeech"); //$NON-NLS-1$
        }

        @Override
        public void onError(int error)
        {
            if (mIsCountDownOn)
            {
                mIsCountDownOn = false;
                mNoSpeechCountDown.cancel();
            }
            mIsListening = false;
            Message message = Message.obtain(null, MSG_RECOGNIZER_START_LISTENING);
            try
            {
                mServerMessenger.send(message);
            }
            catch (RemoteException e)
            {
            }
            //Log.d(TAG, "error = " + error); //$NON-NLS-1$
        }

        @Override
        public void onEvent(int eventType, Bundle params)
        {
        }

        @Override
        public void onPartialResults(Bundle partialResults)
        {
        }

        @Override
        public void onReadyForSpeech(Bundle params)
        {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
            {
                mIsCountDownOn = true;
                mNoSpeechCountDown.start();
                mAudioManager.setStreamMute(AudioManager.STREAM_SYSTEM, false);
            }
            Log.d(TAG, "onReadyForSpeech"); //$NON-NLS-1$
        }

        @Override
        public void onResults(Bundle results)
        {
            //Log.d(TAG, "onResults"); //$NON-NLS-1$
        }

        @Override
        public void onRmsChanged(float rmsdB)
        {
        }
    }

    @Override
    public IBinder onBind(Intent arg0)
    {
        // TODO Auto-generated method stub
        return null;
    }
}
Answer 1:
The instance you create with new VoiceCommandService() is not the instance the system starts via startService(), so its onCreate() never runs and its fields stay null. Instead, bind to the started service and send it messages through the Messenger obtained in onServiceConnected(), as shown below.
Class members in MainActivity
private int mBindFlag;
private Messenger mServiceMessenger;
Start service in onCreate()
@Override
protected void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);

    // Inside the Activity itself, "this" is the Context to use.
    Intent service = new Intent(this, VoiceCommandService.class);
    startService(service);
    mBindFlag = Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH ? 0 : Context.BIND_ABOVE_CLIENT;
}
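Not part of the original answer: before starting the service it can be worth checking that a speech recognition service is actually installed on the device. A minimal sketch using the framework's SpeechRecognizer.isRecognitionAvailable(); the TAG constant is assumed to be the Activity's usual log tag:

// Hypothetical guard, not from the original answer: bail out early if the device
// has no speech recognition service installed.
if (!SpeechRecognizer.isRecognitionAvailable(this))
{
    Log.w(TAG, "Speech recognition is not available on this device"); //$NON-NLS-1$
    return;
}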
Bind the service in onStart() and unbind it in onStop()
@Override
protected void onStart()
{
    super.onStart();
    bindService(new Intent(this, VoiceCommandService.class), mServiceConnection, mBindFlag);
}

@Override
protected void onStop()
{
    super.onStop();

    if (mServiceMessenger != null)
    {
        unbindService(mServiceConnection);
        mServiceMessenger = null;
    }
}
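Also not part of the original answer: if recognition should stop whenever the Activity leaves the foreground, one option is to send MSG_RECOGNIZER_CANCEL before unbinding. A minimal sketch of such an onStop() variant, reusing the message constant defined in the service:

@Override
protected void onStop()
{
    super.onStop();

    if (mServiceMessenger != null)
    {
        try
        {
            // Ask the service to cancel recognition before the Activity disconnects.
            mServiceMessenger.send(Message.obtain(null, VoiceCommandService.MSG_RECOGNIZER_CANCEL));
        }
        catch (RemoteException e)
        {
            e.printStackTrace();
        }
        unbindService(mServiceConnection);
        mServiceMessenger = null;
    }
}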
The mServiceConnection member
private final ServiceConnection mServiceConnection = new ServiceConnection()
{
    @Override
    public void onServiceConnected(ComponentName name, IBinder service)
    {
        if (DEBUG) {Log.d(TAG, "onServiceConnected");} //$NON-NLS-1$

        mServiceMessenger = new Messenger(service);
        Message msg = new Message();
        msg.what = VoiceCommandService.MSG_RECOGNIZER_START_LISTENING;
        try
        {
            mServiceMessenger.send(msg);
        }
        catch (RemoteException e)
        {
            e.printStackTrace();
        }
    }

    @Override
    public void onServiceDisconnected(ComponentName name)
    {
        if (DEBUG) {Log.d(TAG, "onServiceDisconnected");} //$NON-NLS-1$
        mServiceMessenger = null;
    }
}; // mServiceConnection
In the service, return the Messenger's binder from onBind()
@Override
public IBinder onBind(Intent intent)
{
    Log.d(TAG, "onBind"); //$NON-NLS-1$

    return mServerMessenger.getBinder();
}
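A final note that is not in the original answer: because the service was started with startService(), unbinding alone does not stop it; it keeps running until it is stopped explicitly. A minimal sketch of a shutdown path in the Activity:

// Hypothetical shutdown: disconnect from the service, then stop the started instance.
if (mServiceMessenger != null)
{
    unbindService(mServiceConnection);
    mServiceMessenger = null;
}
stopService(new Intent(this, VoiceCommandService.class));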
Answer 2:
A working example, using a third-party Speech library (linked at the end), is given below.
MyService.class
public class MyService extends Service implements SpeechDelegate, Speech.stopDueToDelay {

    public static SpeechDelegate delegate;

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        //TODO do something useful
        try {
            if (VERSION.SDK_INT >= VERSION_CODES.KITKAT) {
                ((AudioManager) Objects.requireNonNull(
                    getSystemService(Context.AUDIO_SERVICE))).setStreamMute(AudioManager.STREAM_SYSTEM, true);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        Speech.init(this);
        delegate = this;
        Speech.getInstance().setListener(this);

        if (Speech.getInstance().isListening()) {
            Speech.getInstance().stopListening();
        } else {
            System.setProperty("rx.unsafe-disable", "True");
            RxPermissions.getInstance(this).request(permission.RECORD_AUDIO).subscribe(granted -> {
                if (granted) { // Always true pre-M
                    try {
                        Speech.getInstance().stopTextToSpeech();
                        Speech.getInstance().startListening(null, this);
                    } catch (SpeechRecognitionNotAvailable exc) {
                        //showSpeechNotSupportedDialog();
                    } catch (GoogleVoiceTypingDisabledException exc) {
                        //showEnableGoogleVoiceTyping();
                    }
                } else {
                    Toast.makeText(this, R.string.permission_required, Toast.LENGTH_LONG).show();
                }
            });
        }
        return Service.START_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent) {
        //TODO for communication return IBinder implementation
        return null;
    }

    @Override
    public void onStartOfSpeech() {
    }

    @Override
    public void onSpeechRmsChanged(float value) {
    }

    @Override
    public void onSpeechPartialResults(List<String> results) {
        for (String partial : results) {
            Log.d("Result", partial+"");
        }
    }

    @Override
    public void onSpeechResult(String result) {
        Log.d("Result", result+"");
        if (!TextUtils.isEmpty(result)) {
            Toast.makeText(this, result, Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onSpecifiedCommandPronounced(String event) {
        try {
            if (VERSION.SDK_INT >= VERSION_CODES.KITKAT) {
                ((AudioManager) Objects.requireNonNull(
                    getSystemService(Context.AUDIO_SERVICE))).setStreamMute(AudioManager.STREAM_SYSTEM, true);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (Speech.getInstance().isListening()) {
            Speech.getInstance().stopListening();
        } else {
            RxPermissions.getInstance(this).request(permission.RECORD_AUDIO).subscribe(granted -> {
                if (granted) { // Always true pre-M
                    try {
                        Speech.getInstance().stopTextToSpeech();
                        Speech.getInstance().startListening(null, this);
                    } catch (SpeechRecognitionNotAvailable exc) {
                        //showSpeechNotSupportedDialog();
                    } catch (GoogleVoiceTypingDisabledException exc) {
                        //showEnableGoogleVoiceTyping();
                    }
                } else {
                    Toast.makeText(this, R.string.permission_required, Toast.LENGTH_LONG).show();
                }
            });
        }
    }
    @Override
    public void onTaskRemoved(Intent rootIntent) {
        // Restarting the service if it is removed.
        PendingIntent service =
            PendingIntent.getService(getApplicationContext(), new Random().nextInt(),
                new Intent(getApplicationContext(), MyService.class), PendingIntent.FLAG_ONE_SHOT);
        AlarmManager alarmManager = (AlarmManager) getSystemService(Context.ALARM_SERVICE);
        assert alarmManager != null;
        // Schedule the restart about one second from now; ELAPSED_REALTIME_WAKEUP expects an
        // absolute elapsed-realtime timestamp, not a delay.
        alarmManager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 1000, service);
        super.onTaskRemoved(rootIntent);
    }
}
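Not part of the original answer: the snippet above only shows the service. A minimal sketch of how an Activity might launch it, assuming the androidx ContextCompat/ActivityCompat helpers are available and that MyService is declared in the manifest; the request code 1 is arbitrary:

// Hypothetical launcher code inside an Activity: start MyService once RECORD_AUDIO is granted.
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
        == PackageManager.PERMISSION_GRANTED) {
    startService(new Intent(this, MyService.class));
} else {
    // Ask for the microphone permission first; onRequestPermissionsResult can then start the service.
    ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, 1);
}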
For more details, see https://github.com/sachinvarma/Speech-Recognizer.
Hope this helps someone in the future.
Source: https://stackoverflow.com/questions/18039429/android-speech-recognition-continuous-service