2016-07-18 4 views
0

Ich arbeite an einer Android Wear App. Diese App zeichnet lokales Audio vom Mikrofon der Smartwatch auf und sendet es an das Handle-Gerät. Das Handle-Gerät empfängt Daten und schreibt eine WAV-Datei. Die Datei ist erstellt, aber wenn ich höre die Datei ist leer, kann ich nur Stille hören.schreiben .wav leer nach dem Empfang von Daten aus der Abnutzung

dies ist der Verschleiß Code:

public void replyAudioByByte(final byte data[]) { 


      final String path = "/start_activity"; 
      final Byte[] text= new Byte[1024]; 


      GoogleApiClient client = new GoogleApiClient.Builder(getApplicationContext()) 
        .addApi(Wearable.API) 
        .build(); 
      new Thread(new Runnable() { 
       @Override 
       public void run() { 

        NodeApi.GetConnectedNodesResult nodes = Wearable.NodeApi.getConnectedNodes(mApiClient).await(); 

        for (Node node : nodes.getNodes()) { 

         MessageApi.SendMessageResult result = Wearable.MessageApi.sendMessage(
           mApiClient, node.getId(),AUDIO_RECORDER, data).await(); 
         if (result.getStatus().isSuccess()) { 
          Log.d("sendMessage","Message send!!"); 
          for (int j=0; j<data.length; j++){ 
          Log.v("Mobile", "Message: {" + data[j] + "} sent to: " + node.getDisplayName()); 
          } 

         } else { 
          // Log an error 
          Log.v("Mobile", "ERROR: failed to send Message"); 
         } 
        } 

       } 
      }).start(); 
      client.disconnect(); 
      Log.d("MOBILE", "send message end"); 
     } 


     public void startRecordingAudio() { 


      recorder = findAudioRecord(); 
      Log.d("recorder:","recorder="+recorder.toString()); 

      CountDownTimer countDowntimer = new CountDownTimer(8000, 1000) { 
       public void onTick(long millisUntilFinished) { 
       } 

       public void onFinish() { 
        try { 
         //Toast.makeText(getBaseContext(), "Stop recording Automatically ", Toast.LENGTH_LONG).show(); 
         Log.d("wear", "stopRecorder=" + System.currentTimeMillis()); 
         recorder.stop(); 
         Log.d("formato registrazione","recorderformat="+recorder.getAudioFormat()+"-----rate="); 
         Log.d("formato registrazione","recordersamplerate=" +recorder.getSampleRate()); 
         isRecording=false; 
         replyAudioByByte(data); 
         for (int j=0; j< data.length;j++){ 
          Log.d("watch audio registrato", "data[]="+data[j]); 
         } 

         Log.d("wear", "recorder.stop ok!"); 
        } catch (Exception e) { 
         // TODO Auto-generated catch block 
         Log.e("wear", "recorder.stop catch"); 
         e.printStackTrace(); 
        } 


       } 
      }; 


      recorder.startRecording(); 
      countDowntimer.start(); 
      Log.d("wear", "startRecorder=" + System.currentTimeMillis()); 

      isRecording = true; 

      recordingThread = new Thread(new Runnable() { 
       public void run() { 

        while (isRecording) { 
        recorder.read(data, 0, bufferSize); 
         Log.d("WEAR","recorder.read="+recorder.read(data, 0, bufferSize)); 
        } 
        recorder.stop(); 
        recorder.release(); 

        for (int i = 0; i < bufferSize; i++) { 

         Log.d("startrecording", "data=" + data[i]); 
        } 


       } 
      }, "AudioRecorder Thread"); 

      recordingThread.start(); 

      int a= recorder.getSampleRate(); 

      Log.d("formato registrazione","recorderformat="+recorder.getAudioFormat()+"-----rate="+a); 
      Log.d("formato registrazione","recordersamplerate=" +recorder.getSampleRate()); 

     } 


     public AudioRecord findAudioRecord() { 
      /** The settings that i must use are not the same for every device, so i try if they work */ 
      for (int rate : mSampleRates) { 
       for (short audioFormat : audioF) { 
        for (short channelConfig : channelC) { 
         try { 
          //Log.d("Check", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig); 
          int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat); 
          if (bufferSize != AudioRecord.ERROR_BAD_VALUE) { 
           //It checks if it can instantiate the audiorecorder without problems 
           AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize + 2000); 
           Log.d("AudioRecorder data","AudioSource.Default="+ AudioSource.MIC); 
           Log.d("AudioRecorder data","Rate="+ rate); 
           Log.d("AudioRecorder data","Channel.config="+ channelConfig); 
           Log.d("AudioRecorder data","AudioFormat= "+audioFormat); 
           bufferSize=bufferSize+2000; 
           Log.d("AudioRecorder data","buffersize="+ bufferSize); 

           if (recorder.getState() == AudioRecord.STATE_INITIALIZED) 
            Log.d("audiorec","rate="+rate); 

            return recorder; 
          } 
         } catch (Exception e) { 
          Log.e("Check", rate + "Exception, keep trying.", e); 
         } 
        } 
       } 
      } 
      return null; 
     } 

dies ist der Griff Code:

public Void doInBackground(byte [] dataToWrite) { 
     Log.d("doInBackground","entrato"); 

      byte data[] = new byte[bufferSize]; 
      String tempfilename = ""; 
      FileOutputStream os = null; 
      //if(allowRecorder){ 
       tempfilename = getTempFilename(); 
       Log.d("doInBackground","getTempFilename=" +tempfilename.toString()); 
       try { 
        os = new FileOutputStream(tempfilename); 
        Log.d("doInBackground","os new ok"); 
       } catch (FileNotFoundException e) { 
        e.printStackTrace(); 
       } 


      dbData = new ArrayList<Double>(); 


      Log.d("doInBackGround", "dateToWrite.length=" + dataToWrite.length); 
      for (int j = 0; j < dataToWrite.length; j++) { 

        try { 
        os.write(dataToWrite); 
        Log.d("os,write", "dataToWrite"); 
       } catch (IOException e) { 
        e.printStackTrace(); 
       } 
      } 

       if(data[data.length-1]!=0){ 
        double Db = 20 * Math.log10(Math.abs((data[data.length-1]/51805.5336)/0.00002)); 
        dbData.add(Db); 
       } 



       try { 
        os.close(); 
        Log.d("os.close", "dataToWrite"); 
        copyWaveFile(tempfilename,getFilename()); 
        deleteTempFile(); 

       } catch (IOException e) { 
        e.printStackTrace(); 
       } 

     return null; 
    } 

private void copyWaveFile(String inFilename,String outFilename){ 
     FileInputStream in = null; 
     FileOutputStream out = null; 
     long totalAudioLen = 0; 
     long totalDataLen = 0; 
     long longSampleRate = 8000; 
     System.out.println("SAMPLE RATE = "+longSampleRate); 
     int channels = 12; 
     audioFormat = 16; 

     long byteRate = audioFormat * longSampleRate * channels/8; 

     byte[] data = new byte[bufferSize]; 

     try { 
      in = new FileInputStream(inFilename); 
      out = new FileOutputStream(outFilename); 
      totalAudioLen = in.getChannel().size(); 
      totalDataLen = totalAudioLen + 36; 
      Log.d("RecorderRead","totalAudioLen=" +totalAudioLen); 
      Log.d("RecorderRead","totalDatalen=" +totalDataLen); 
      System.out.println("Temp File size: " + totalDataLen); 

      Log.d("AudioRecorder data","AudioSource.Default="+ AudioSource.DEFAULT); 
      Log.d("AudioRecorder data","Rate="+ longSampleRate); 
      Log.d("AudioRecorder data","Channel.config="+ channels); 
      Log.d("AudioRecorder data","AudioFormat= "+audioFormat); 
      //bufferSize=bufferSize+2000; 
      Log.d("AudioRecorder data","buffersize="+ bufferSize); 


      if(totalDataLen != 36){ 
       writeWaveFileHeader(out, totalAudioLen, totalDataLen, 
         longSampleRate, channels, byteRate); 
       Log.d("writeWAVEFILE", "chiamato"); 
       while(in.read(data) != -1){ 
        out.write(data); 
       } 
       System.out.println("Wav File size: " + out.getChannel().size()); 
      } 
      else{ 
       System.out.println("Non creo il file .wav"); 
      } 

      in.close(); 
      out.close(); 
     } catch (FileNotFoundException e) { 
      e.printStackTrace(); 
     } catch (IOException e) { 
      e.printStackTrace(); 
     } 
    } 



private void writeWaveFileHeader(
      FileOutputStream out, long totalAudioLen, 
      long totalDataLen, long longSampleRate, int channels, 
      long byteRate) throws IOException { 

     byte[] header = new byte[44]; 

     header[0] = 'R'; // RIFF/WAVE header 
     header[1] = 'I'; 
     header[2] = 'F'; 
     header[3] = 'F'; 
     header[4] = (byte) (totalDataLen & 0xff); 
     header[5] = (byte) ((totalDataLen >> 8) & 0xff); 
     header[6] = (byte) ((totalDataLen >> 16) & 0xff); 
     header[7] = (byte) ((totalDataLen >> 24) & 0xff); 
     header[8] = 'W'; 
     header[9] = 'A'; 
     header[10] = 'V'; 
     header[11] = 'E'; 
     header[12] = 'f'; // 'fmt ' chunk 
     header[13] = 'm'; 
     header[14] = 't'; 
     header[15] = ' '; 
     header[16] = 16; // 4 bytes: size of 'fmt ' chunk 
     header[17] = 0; 
     header[18] = 0; 
     header[19] = 0; 
     header[20] = 1; // format = 1 
     header[21] = 0; 
     header[22] = (byte) channels; 
     header[23] = 0; 
     header[24] = (byte) (longSampleRate & 0xff); 
     header[25] = (byte) ((longSampleRate >> 8) & 0xff); 
     header[26] = (byte) ((longSampleRate >> 16) & 0xff); 
     header[27] = (byte) ((longSampleRate >> 24) & 0xff); 
     header[28] = (byte) (byteRate & 0xff); 
     header[29] = (byte) ((byteRate >> 8) & 0xff); 
     header[30] = (byte) ((byteRate >> 16) & 0xff); 
     header[31] = (byte) ((byteRate >> 24) & 0xff); 
     header[32] = (byte) (2 * 16/8); // block align 
     header[33] = 0; 
     header[34] = (byte) audioFormat; // bits per sample 
     header[35] = 0; 
     header[36] = 'd'; 
     header[37] = 'a'; 
     header[38] = 't'; 
     header[39] = 'a'; 
     header[40] = (byte) (totalAudioLen & 0xff); 
     header[41] = (byte) ((totalAudioLen >> 8) & 0xff); 
     header[42] = (byte) ((totalAudioLen >> 16) & 0xff); 
     header[43] = (byte) ((totalAudioLen >> 24) & 0xff); 

     out.write(header, 0, 44); 
    } 

Verschleiß mainifest Ich habe

<uses-permission android:name="android.permission.RECORD_AUDIO" /> 
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" /> 
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> 

in Griff manifest Ich habe

<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> 

wenn ich App im Logfile des Verschleißes app laufen habe ich diesen Fehler:

AudioRecord-JNI: Error -4 during AudioRecord native read 

Was sollte ich tun, um es zu lösen? kann mir jemand helfen? Was ist los mit dir? Jede Art von Hilfe wird geschätzt, Code oder Tutorial. Vielen Dank im Voraus

Antwort

0

Sie müssen den Audio-Encoder für die Aufnahme einstellen.

void setAudioEncoder (int audio_encoder)

Wenn diese Methode nicht aufgerufen wird, wird die Ausgabedatei nicht eine Audiospur enthalten. Rufen Sie dies nach setOutputFormat() aber vor prepare().

Versuchen Sie, folgen Sie den Schritten unten aufgeführt:

Hier ist ein Beispielcode, wie Audio aufnehmen und das aufgenommene Audio spielen: https://developer.android.com/guide/topics/media/audio-capture.html#example

Verwandte Themen