
SpeechRecognizer plays the sound but delivers no results when startListening() is called

I am trying to implement speech recognition without the dialog. The first call to SpeechRecognizer.startListening(recognizerIntent) works fine and I get results containing the recognized speech string. But when I want to start recognition a second time, only the recognition start sound is played, and as soon as I stop speaking, the end-of-recognition sound plays. None of the callback methods such as onResults(), onRmsChanged() or onError() are invoked.

The activity:

package com.example.plucinst.emojat; 

import android.app.Activity; 
import android.content.Intent; 
import android.content.SharedPreferences; 
import android.net.ConnectivityManager; 
import android.net.NetworkInfo; 
import android.os.Bundle; 
import android.preference.PreferenceManager; 
import android.speech.RecognitionListener; 
import android.speech.RecognizerIntent; 
import android.speech.SpeechRecognizer; 
import android.support.v7.app.AppCompatActivity; 
import android.util.Log; 
import android.view.View; 
import android.widget.Button; 
import android.widget.CompoundButton; 
import android.widget.ImageButton; 
import android.widget.LinearLayout; 
import android.widget.ProgressBar; 
import android.widget.Switch; 
import android.widget.Toast; 

import java.util.ArrayList; 
import java.util.Arrays; 
import java.util.List; 

/** 
* Created by thomasplucinski on 08/04/2017. 
*/ 

public class StartActivity extends AppCompatActivity implements RecognitionListener { 

    private Button btnGoToChat; 
    private ImageButton btnSpeechControl; 
    private LinearLayout speechControlContainer; 
    private Switch switchMode; 
    private Activity context; 
    private String firstMatchText; 
    private SpeechRecognizer speechRecognizer = null; 
    private ProgressBar progressBar; 
    private Intent recognizerIntent; 
    private String messageText = ""; 
    private GlobalSetting globalSetting; 
    private ArrayList<String> matchesText; 
    private List<String> sendStringArray; 
    private String recognizedContactName = "Gollum"; //For the prototype the contact isn't recognized via speech but is set as a static value 
    private static final int REQUEST_CODE_SPEECH = 1234; 
    private static final int REQUEST_CODE_DETECTION = 0000; 
    private static final String LOG_TAG = "START_ACTIVITY"; 

    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     context = this; 
     globalSetting = GlobalSetting.getInstance(); 
     sendStringArray = Arrays.asList(getResources().getStringArray(R.array.send)); 
     setContentView(R.layout.activity_start); 
     PreferenceManager.setDefaultValues(this, R.xml.preferences_app_settings, false); 
     SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this); 

     //Intent intent = new Intent(this, InboxActivity.class); 
     //startActivity(intent); 
     // 

     initUI(); 

     // Init the speechRecognition 
     speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this); 
     speechRecognizer.setRecognitionListener(this); 
     recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); 
     recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "de-DE"); 
     recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName()); 
     recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH); 
     recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 3); 


    } 

    @Override 
    protected void onResume(){ 
     super.onResume(); 
     if (globalSetting.speechRecognitionActive){ 
      startSpeechRecognition(); 
     } 
    } 

    /** 
    * Checks if the device is connected to the internet 
    * @return true if an active network connection is available 
    */ 
    public boolean isConnected() 
    { 
     ConnectivityManager cm = (ConnectivityManager) getSystemService(context.CONNECTIVITY_SERVICE); 
     NetworkInfo net = cm.getActiveNetworkInfo(); 
     if (net!=null && net.isAvailable() && net.isConnected()) { 
      return true; 
     } else { 
      return false; 
     } 
    } 

    /** 
    * Initializes the UI elements and the listeners 
    */ 
    private void initUI(){ 
     btnGoToChat = (Button) findViewById(R.id.btn_go_to_chat); 
     btnSpeechControl = (ImageButton) findViewById(R.id.btn_speech_control); 
     switchMode = (Switch) findViewById(R.id.switch_app_mode); 
     progressBar = (ProgressBar) findViewById(R.id.speech_progress_bar); 
     progressBar.setIndeterminate(false); 
     speechControlContainer = (LinearLayout) findViewById(R.id.speech_control_container); 
     if (btnGoToChat!=null){ 
      btnGoToChat.setOnClickListener(new Button.OnClickListener(){ 
       @Override 
       public void onClick(View v) { 
        goToInbox(); 

       } 
      }); 
     } 

     if (btnSpeechControl!=null){ 
      btnSpeechControl.setOnClickListener(new Button.OnClickListener(){ 
       @Override 
       public void onClick(View v) { 
        if(isConnected()){ 
         startSpeechRecognition(); 
         globalSetting.speechRecognitionActive = true; 
        } 
        else{ 
         Toast.makeText(getApplicationContext(), "Please Connect to Internet", Toast.LENGTH_LONG).show(); 
         globalSetting.speechRecognitionActive = false; 
        } 
        //goToInbox(); 

       } 
      }); 
     } 

     switchMode.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { 
      public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { 
       // do something, the isChecked will be 
       // true if the switch is in the On position 
       if (isChecked){ 
        speechControlContainer.setVisibility(View.VISIBLE); 
        btnGoToChat.setVisibility(View.GONE); 
       }else{ 
        speechControlContainer.setVisibility(View.GONE); 
        btnGoToChat.setVisibility(View.VISIBLE); 
       } 
      } 
     }); 

    } 

    private void startSpeechRecognition(){ 


     /* 
     Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); 
     intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, 
       RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); 
     startActivityForResult(intent, REQUEST_CODE_SPEECH); 
     */ 
     speechRecognizer.stopListening(); 
     Log.d(LOG_TAG, " call startSpeechRecognition"); 
     speechRecognizer.startListening(recognizerIntent); 
    } 


    @Override 
    protected void onPause() { 
     super.onPause(); 
     if (speechRecognizer != null) { 
      speechRecognizer.destroy(); 
      Log.i(LOG_TAG, "destroy"); 
     } 

    } 

    @Override 
    public void onBeginningOfSpeech() { 
     Log.i(LOG_TAG, "onBeginningOfSpeech"); 
     progressBar.setIndeterminate(false); 
     progressBar.setMax(10); 
    } 

    @Override 
    public void onBufferReceived(byte[] buffer) { 
     Log.i(LOG_TAG, "onBufferReceived: " + buffer); 
    } 

    @Override 
    public void onEndOfSpeech() { 
     Log.i(LOG_TAG, "onEndOfSpeech"); 
     progressBar.setIndeterminate(true); 
     //toggleButton.setChecked(false); 
    } 

    @Override 
    public void onError(int errorCode) { 
     String errorMessage = getErrorText(errorCode); 
     Log.d(LOG_TAG, "FAILED " + errorMessage); 
     //returnedText.setText(errorMessage); 
     //toggleButton.setChecked(false); 
    } 

    @Override 
    public void onEvent(int arg0, Bundle arg1) { 
     Log.i(LOG_TAG, "onEvent"); 
    } 

    @Override 
    public void onPartialResults(Bundle arg0) { 
     Log.i(LOG_TAG, "onPartialResults"); 
    } 

    @Override 
    public void onReadyForSpeech(Bundle arg0) { 
     Log.i(LOG_TAG, "onReadyForSpeech"); 
    } 

    @Override 
    public void onResults(Bundle results) { 
     Log.i(LOG_TAG, "onResults"); 
     progressBar.setIndeterminate(true); 
     matchesText = results 
       .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION); 
     processSpeechResults(); 

     //returnedText.setText(text); 
    } 

    @Override 
    public void onRmsChanged(float rmsdB) { 
     Log.i(LOG_TAG, "onRmsChanged: " + rmsdB); 
     progressBar.setProgress((int) rmsdB); 
    } 

    public static String getErrorText(int errorCode) { 
     String message; 
     switch (errorCode) { 
      case SpeechRecognizer.ERROR_AUDIO: 
       message = "Audio recording error"; 
       break; 
      case SpeechRecognizer.ERROR_CLIENT: 
       message = "Client side error"; 
       break; 
      case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS: 
       message = "Insufficient permissions"; 
       break; 
      case SpeechRecognizer.ERROR_NETWORK: 
       message = "Network error"; 
       break; 
      case SpeechRecognizer.ERROR_NETWORK_TIMEOUT: 
       message = "Network timeout"; 
       break; 
      case SpeechRecognizer.ERROR_NO_MATCH: 
       message = "No match"; 
       break; 
      case SpeechRecognizer.ERROR_RECOGNIZER_BUSY: 
       message = "RecognitionService busy"; 
       break; 
      case SpeechRecognizer.ERROR_SERVER: 
       message = "error from server"; 
       break; 
      case SpeechRecognizer.ERROR_SPEECH_TIMEOUT: 
       message = "No speech input"; 
       break; 
      default: 
       message = "Didn't understand, please try again."; 
       break; 
     } 
     return message; 
    } 

    private void processSpeechResults(){ 

     firstMatchText = matchesText.get(0); //because recognizerIntent returns a list of proposed matches to the recognized text and we only need the first 
     Log.d("STARTACTIVITY", "recognized text: " + firstMatchText); 

     //Do sth. 
    } 


} 

Does anyone have an idea why this happens?

Thanks in advance!

Answer


Do not reuse the Intent instance; create a new instance each time you start recognition.
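
A minimal sketch of that suggestion, assuming the rest of the activity stays as posted: startSpeechRecognition() builds a fresh RecognizerIntent on every call instead of reusing the recognizerIntent field initialized in onCreate(). The local variable name intent is only illustrative.

    private void startSpeechRecognition() { 
     // Build a fresh Intent for every recognition run instead of reusing 
     // the recognizerIntent field created in onCreate(). 
     Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); 
     intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "de-DE"); 
     intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName()); 
     intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH); 
     intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 3); 

     Log.d(LOG_TAG, " call startSpeechRecognition"); 
     speechRecognizer.startListening(intent); 
    } 

Note that the posted onPause() also calls speechRecognizer.destroy(), so if the activity is paused and resumed, the SpeechRecognizer itself would likely need to be recreated as well before calling startListening() again.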
