1

Derzeit verwende ich Pocketsphinx für die Spracherkennung und einen Visualizer, um die Wellenform des ins Mikrofon gesprochenen Audios anzuzeigen. Allerdings kann ich immer nur eines von beiden ausführen: Wenn das Mikrofon vom Visualizer belegt ist, passiert bei der Spracherkennung nichts. Bitte lesen Sie meinen Code und helfen Sie mir, Spracherkennung und Visualizer gleichzeitig zu verwenden,

sodass beide gleichzeitig laufen.

Dies ist die MainActivity:

/**
 * Main screen: records microphone audio for a waveform visualizer and runs
 * Pocketsphinx speech recognition. A FloatingActionButton toggles recording;
 * recognition results are shown in {@code result_text}.
 *
 * NOTE(review): the recognizer and the MediaRecorder still contend for the
 * microphone — running both truly simultaneously requires modifying the
 * Pocketsphinx SpeechRecognizer class (see the accepted answer below).
 */
public class MainActivity extends AppCompatActivity implements RecognitionListener {

    // Runtime-permission bookkeeping for Android 6+ (API 23).
    private boolean permissionToWriteAccepted = false;
    private String[] permissions = {"android.permission.RECORD_AUDIO", "android.permission.WRITE_EXTERNAL_STORAGE"};

    // Named search: lets the decoder be reconfigured quickly.
    private static final String SENTENCE_SEARCH = "sentence";

    // Request code used when asking for the RECORD_AUDIO permission alone.
    private static final int PERMISSIONS_REQUEST_RECORD_AUDIO = 1;

    private SpeechRecognizer recognizer;

    // ---- visualizer / recorder state ----

    public static final String DIRECTORY_NAME_TEMP = "AudioTemp";
    public static final int REPEAT_INTERVAL = 40; // visualizer refresh period, ms

    VisualizerView visualizerView;

    private MediaRecorder recorder = null;

    File audioDirTemp;               // scratch directory for the recorded audio
    private boolean isRecording = false;

    private Handler handler;         // drives periodic visualizer updates on the UI thread

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        visualizerView = (VisualizerView) findViewById(R.id.visualizer);

        // Ask for RECORD_AUDIO and WRITE_EXTERNAL_STORAGE up front (API 23+).
        int requestCode = 200;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            requestPermissions(permissions, requestCode);
        }

        // Bail out until the user grants microphone access; setup resumes in
        // onRequestPermissionsResult().
        int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
        if (permissionCheck == PackageManager.PERMISSION_DENIED) {
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, PERMISSIONS_REQUEST_RECORD_AUDIO);
            return;
        }

        // Prepare a clean scratch directory for the audio recorder.
        audioDirTemp = new File(Environment.getExternalStorageDirectory(), DIRECTORY_NAME_TEMP);
        if (audioDirTemp.exists()) {
            deleteFilesInDir(audioDirTemp);
        } else {
            audioDirTemp.mkdirs();
        }

        // Handler that schedules the visualizer refresh loop.
        handler = new Handler();

        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {

                runRecognizerSetup();

                if (!isRecording) {
                    recorder = new MediaRecorder();
                    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
                    recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
                    recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
                    // BUGFIX: the container is 3GPP with an AMR-NB track, so
                    // use a matching ".3gp" extension instead of ".mp3".
                    recorder.setOutputFile(audioDirTemp + "/audio_file.3gp");
                    // (Removed the original setOnErrorListener(null) /
                    // setOnInfoListener(null) calls — setting null listeners
                    // is a no-op.)

                    try {
                        recorder.prepare();
                        recorder.start();
                        isRecording = true; // recording is now in progress
                    } catch (IllegalStateException e) {
                        e.printStackTrace();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    handler.post(updateVisualizer);
                } else {
                    releaseRecorder();
                }

                Snackbar.make(view, "Please speak clearly to the microphone", Snackbar.LENGTH_INDEFINITE)
                        .setAction("DISMISS", new View.OnClickListener() {
                            @Override
                            public void onClick(View v) {
                                // Dismiss only; nothing further to do.
                            }
                        }).show();
            }
        });

        // Initial caption shown before any recognition result arrives.
        ((TextView) findViewById(R.id.caption_text)).setText("Speak in Nepali");
    }

    /** Stops the visualizer loop and releases the MediaRecorder, if active. */
    private void releaseRecorder() {
        if (recorder != null) {
            isRecording = false;
            handler.removeCallbacks(updateVisualizer);
            visualizerView.clear();
            recorder.stop();
            recorder.reset();
            recorder.release();
            recorder = null;
        }
    }

    /**
     * Deletes the plain files (subdirectories are left alone) inside
     * {@code path}. Always returns true, matching the original contract.
     */
    public static boolean deleteFilesInDir(File path) {
        if (path.exists()) {
            File[] files = path.listFiles();
            if (files == null) {
                return true;
            }
            for (File file : files) {
                if (!file.isDirectory()) {
                    file.delete();
                }
            }
        }
        return true;
    }

    @Override
    public void onRequestPermissionsResult(int requestCode,
                                           String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);

        switch (requestCode) {
            case 200:
                // Index 1 is WRITE_EXTERNAL_STORAGE (see the permissions array).
                // BUGFIX: guard the index — grantResults is empty when the
                // request dialog is cancelled.
                permissionToWriteAccepted = grantResults.length > 1
                        && grantResults[1] == PackageManager.PERMISSION_GRANTED;
                break;
        }

        if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                runRecognizerSetup();
            } else {
                finish();
            }
        }
    }

    /** Syncs the recognizer assets and builds the decoder off the UI thread. */
    private void runRecognizerSetup() {
        new AsyncTask<Void, Void, Exception>() {
            @Override
            protected Exception doInBackground(Void... params) {
                try {
                    Assets assets = new Assets(MainActivity.this);
                    File assetsDir = assets.syncAssets();
                    setupRecognizer(assetsDir);
                } catch (IOException e) {
                    return e;
                }
                return null;
            }

            @Override
            protected void onPostExecute(Exception e) {
                if (e != null) {
                    ((TextView) findViewById(R.id.caption_text))
                            .setText("Failed " + e);
                } else {
                    switchSearch(SENTENCE_SEARCH);
                }
            }
        }.execute();
    }

    /**
     * Restarts listening on the given named search; input times out after
     * 2000 ms of silence.
     */
    private void switchSearch(String searchName) {
        if (recognizer == null) {
            return; // setup has not completed yet
        }
        recognizer.stop();
        // BUGFIX: honour the requested search name instead of unconditionally
        // overwriting it with SENTENCE_SEARCH.
        recognizer.startListening(searchName, 2000);
    }

    /** Creates the Pocketsphinx recognizer and registers the n-gram search. */
    private void setupRecognizer(File assetsDir) throws IOException {
        recognizer = SpeechRecognizerSetup.defaultSetup()
                .setAcousticModel(new File(assetsDir, "loadshedding.ci_cont"))
                .setDictionary(new File(assetsDir, "loadshedding.dict"))
                .getRecognizer();
        recognizer.addListener(this);

        // Continuous language model (a grammar search could be registered
        // here instead via addGrammarSearch).
        File languageModel = new File(assetsDir, "loadshedding.lm.DMP");
        recognizer.addNgramSearch(SENTENCE_SEARCH, languageModel);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // The action bar handles Home/Up automatically as long as a parent
        // activity is declared in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onBeginningOfSpeech() {
        // No-op: nothing to do when speech starts.
    }

    @Override
    public void onEndOfSpeech() {
        if (!recognizer.getSearchName().equals(SENTENCE_SEARCH)) {
            switchSearch(SENTENCE_SEARCH);
        }
        Toast.makeText(this, "onEndofSpeech", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onPartialResult(Hypothesis hypothesis) {
        if (hypothesis == null) {
            return;
        }
        String text = hypothesis.getHypstr();
        ((TextView) findViewById(R.id.result_text)).setText(text);
        Toast.makeText(this, "onPartialResult", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onResult(Hypothesis hypothesis) {
        // Final results are intentionally ignored; the UI is already updated
        // incrementally in onPartialResult().
    }

    @Override
    public void onError(Exception e) {
        ((TextView) findViewById(R.id.caption_text)).setText(e.getMessage());
    }

    @Override
    public void onTimeout() {
        switchSearch(SENTENCE_SEARCH);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (recognizer != null) {
            recognizer.cancel();
            recognizer.shutdown();
        }
        releaseRecorder();
    }

    // Polls the recorder's peak amplitude every REPEAT_INTERVAL ms and feeds
    // it to the visualizer while a recording is in progress.
    Runnable updateVisualizer = new Runnable() {
        @Override
        public void run() {
            // BUGFIX: also check the recorder itself — a queued update may
            // fire after releaseRecorder() has already nulled it.
            if (isRecording && recorder != null) {
                int amplitude = recorder.getMaxAmplitude();
                visualizerView.addAmplitude(amplitude); // update the view
                visualizerView.invalidate();            // trigger redraw
                handler.postDelayed(this, REPEAT_INTERVAL);
            }
        }
    };
}

Dies ist die VisualizerView:

/**
 * Simple waveform view: stores a rolling window of microphone amplitudes and
 * draws each as a vertical green line centred on the view's horizontal axis.
 */
public class VisualizerView extends View {
    private static final int LINE_WIDTH = 1;  // width of each visualizer line, px
    private static final int LINE_SCALE = 75; // divisor that scales raw amplitudes

    // BUGFIX: initialise eagerly. The original left this null until
    // onSizeChanged() ran, so clear()/addAmplitude() called before the first
    // layout pass crashed with a NullPointerException.
    private List<Float> amplitudes = new ArrayList<Float>();

    private int width;       // current view width, px
    private int height;      // current view height, px
    private Paint linePaint; // paint used for every amplitude line

    /** Inflation constructor; configures the green, 1-px line paint. */
    public VisualizerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        linePaint = new Paint();
        linePaint.setColor(Color.GREEN);
        linePaint.setStrokeWidth(LINE_WIDTH);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        width = w;
        height = h;
        // Re-create with capacity for one amplitude per drawable column.
        amplitudes = new ArrayList<Float>(width / LINE_WIDTH);
    }

    /** Clears all stored amplitudes to prepare for a new visualization. */
    public void clear() {
        amplitudes.clear();
    }

    /** Appends an amplitude, evicting the oldest once the view is full. */
    public void addAmplitude(float amplitude) {
        amplitudes.add(amplitude);
        // Keep at most one line per horizontal pixel.
        if (amplitudes.size() * LINE_WIDTH >= width) {
            amplitudes.remove(0);
        }
    }

    @Override
    public void onDraw(Canvas canvas) {
        int middle = height / 2; // vertical centre of the view
        float curX = 0;

        // One vertical line per stored amplitude, mirrored about the centre.
        for (float power : amplitudes) {
            float scaledHeight = power / LINE_SCALE;
            curX += LINE_WIDTH;
            canvas.drawLine(curX, middle + scaledHeight / 2, curX,
                    middle - scaledHeight / 2, linePaint);
        }
    }
}

Dies ist das Layout:
<?xml version="1.0" encoding="utf-8"?>
<!-- Main content: caption, recognized-text area, and the waveform visualizer.
     FIX: removed the duplicate xmlns:android declaration on the inner
     LinearLayout and replaced deprecated "fill_parent" with "match_parent". -->
<RelativeLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:id="@+id/content_main"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    android:paddingBottom="@dimen/activity_vertical_margin"
    app:layout_behavior="@string/appbar_scrolling_view_behavior"
    tools:showIn="@layout/activity_main"
    tools:context="pannam.loadsheddingfinal.MainActivity">

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:orientation="vertical"
        android:padding="16dp">

        <!-- Static caption shown above the recognition results. -->
        <TextView
            android:id="@+id/caption_text"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center_horizontal"
            android:textAppearance="?android:attr/textAppearanceLarge"
            android:typeface="sans" />

        <!-- Takes the remaining vertical space; centres the result text. -->
        <LinearLayout
            android:layout_width="match_parent"
            android:layout_height="0dp"
            android:layout_weight="1"
            android:gravity="center">

            <TextView
                android:id="@+id/result_text"
                android:layout_width="wrap_content"
                android:layout_height="wrap_content"
                android:textAppearance="?android:attr/textAppearanceMedium" />
        </LinearLayout>

        <!-- Custom waveform view fed by MediaRecorder.getMaxAmplitude(). -->
        <pannam.loadsheddingfinal.VisualizerView
            android:id="@+id/visualizer"
            android:layout_width="220dp"
            android:layout_height="75dp"
            android:layout_centerHorizontal="true"
            android:layout_margin="5dp" />
    </LinearLayout>
</RelativeLayout>

Antwort

0

Sie können nicht beides gleichzeitig ausführen, ohne den Spracherkenner selbst zu ändern. Sie müssen die Klasse SpeechRecognizer in Ihre eigenen Quellen kopieren und dort anpassen.

decoder.processRaw(buffer, nread, false, false); 
// This is a code to add together with new AmplitudeEvent 
int max = 0; 
for (int i = 0; i < nread; i++) { 
    max = Math.max(max, Math.abs(buffer[i])); 
}   
mainHandler.post(new AmplitudeEvent(max)); 

final Hypothesis hypothesis = decoder.hyp(); 
mainHandler.post(new ResultEvent(hypothesis, false)); 

...

private class AmplitudeEvent extends RecognitionEvent { 
    private final int amplitude; 

    Amplitude(int amplitude) { 
     this.amplitude = amplitude; 
    } 

    @Override 
    protected void execute(RecognitionListener listener) { 
     listener.onAmplitude(amplitude); 
    } 
} 
Auf diese Weise können Sie zum Beispiel mit jedem Teilergebnis eine Mitteilung über die Amplitude senden.
Verwandte Themen