
Can anyone see why the AudioTrack isn't working? Buffers are going into and coming out of the AudioTrack, but it doesn't play anything and there is no sound.

public class MainActivity extends AppCompatActivity { 

private MediaExtractor extractor; 
private MediaCodec decoder; 
private Surface surface; 
private byte[] b; 
AudioManager audioManager; 

@Override 
protected void onCreate(Bundle savedInstanceState) { 
    super.onCreate(savedInstanceState); 
    setContentView(R.layout.activity_main); 

    Button button = (Button)findViewById(R.id.button); 
    button.setOnClickListener(new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 
      run(); 
     } 
    }); 
} 

public void run() { 
    extractor = new MediaExtractor(); 
    AssetFileDescriptor sampleFD = getResources().openRawResourceFd(R.raw.pinkfloyd); 
    try { 
     extractor.setDataSource(sampleFD.getFileDescriptor(), sampleFD.getStartOffset(), sampleFD.getLength()); 
    } catch (IOException e) { 
     e.printStackTrace(); 
    } 

    for (int i = 0; i < extractor.getTrackCount(); i++) { 
     MediaFormat format = extractor.getTrackFormat(i); 
     String mime = format.getString(MediaFormat.KEY_MIME); 
     if (mime.startsWith("audio/")) { 
      extractor.selectTrack(i); 
      try { 
       decoder = MediaCodec.createDecoderByType(mime); 
      } catch (IOException e) { 
       e.printStackTrace(); 
      } 
      decoder.configure(format, surface, null, 0); 
      break; 
     } 
    } 

    if (decoder == null) { 
     Log.e("DecodeActivity", "Can't find video info!"); 
     return; 
    } 

    decoder.start(); 

    ByteBuffer inputBuffers [] = decoder.getInputBuffers(); 
    ByteBuffer outputBuffers [] = decoder.getOutputBuffers(); 

    audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE); 
    audioManager.setMode(AudioManager.MODE_CURRENT); 
    audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0); 
    int lengthOfAudioClip = outputBuffers.length; 
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, lengthOfAudioClip, AudioTrack.MODE_STREAM); 

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 
    audioTrack.play(); 
    boolean isEOS = false; 

    while (!Thread.interrupted()) { 
     if (!isEOS) { 
      int inIndex = decoder.dequeueInputBuffer(10000); 
      if (inIndex >= 0) { 
       ByteBuffer buffer = inputBuffers[inIndex]; 
       decoder.getInputBuffer(inIndex); 
       int sampleSize = extractor.readSampleData(buffer, 0); 

       if (sampleSize < 0) { 
        Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM"); 
        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); 
        isEOS = true; 
       } else { 
        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0); 
        extractor.advance(); 
       } 
      } 
     } 

     int outIndex = decoder.dequeueOutputBuffer(info, 10000); 
     switch (outIndex) 
     { 
      case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 
       Log.d("DecodeActivity", "New format " + decoder.getOutputFormat()); 
       break; 
      case MediaCodec.INFO_TRY_AGAIN_LATER: 
       Log.d("DecodeActivity", "dequeueOutputBuffer timed out!"); 
       break; 
      default: 
       ByteBuffer buffer = outputBuffers[outIndex]; 
       Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer); 
       b = new byte[info.size-info.offset]; 

       Log.d("LOGGING FOR B", b + ""); 
       audioTrack.write(b, 0, outputBuffers.length); 
       decoder.releaseOutputBuffer(outIndex, true); 

       Log.d("LOGGING FOREST KEEP OUT", outIndex + ""); 
       Log.d("LOG STATE", audioTrack.getState() + ""); 
       Log.d("LOG STREAMTYPE", audioTrack.getStreamType() + ""); 
       break; 
     } 

     if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 
      Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM"); 

      audioTrack.flush(); 
      audioTrack.release(); 

      break; 
     } 
    } 

    Log.d("LOGGING FOR INPUT", inputBuffers + ""); 
    Log.d("LOGGING FOR OUTPUT", outputBuffers + ""); 
    Log.d("OUTLENGTH", outputBuffers.length + ""); 
    Log.d("SIZE OF B", b.length + ""); 

//  AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 44100, AudioTrack.MODE_STREAM); 
//  audioTrack.getSampleRate(); 

    decoder.stop(); 
    decoder.release(); 
    extractor.release(); 
} 
} 

Answers


You can simply try this:

MediaPlayer mPlayer = MediaPlayer.create(ThisActivity.this, R.raw.mysoundfile); 
mPlayer.start(); 

Don't forget to stop it when the activity is destroyed:

public void onDestroy() { 

    mPlayer.stop(); 
    super.onDestroy(); 

} 
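
Since mPlayer has to be a field for onDestroy() to reach it, a slightly fuller sketch (release() is what actually frees the underlying MediaPlayer resources) might look like this:

private MediaPlayer mPlayer;   // must be a field so onDestroy() can see it 

@Override 
protected void onDestroy() { 
    if (mPlayer != null) { 
        mPlayer.stop();        // stop playback 
        mPlayer.release();     // free the MediaPlayer's resources 
        mPlayer = null; 
    } 
    super.onDestroy(); 
} 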

For more on this, take a look at this older post:

How do I play an mp3 in the res/raw folder of my android app?


byte[] b appears to be empty when you write it to the AudioTrack. You need to fill byte[] b from the output buffer before writing it to the AudioTrack, for example:

buffer.get(b, 0, info.size-info.offset); 
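
For context, here is a rough, untested sketch (reusing the question's variable names outputBuffers, info, audioTrack and decoder) of how the default branch of the output loop could look once the array is actually filled:

ByteBuffer buffer = outputBuffers[outIndex]; 
byte[] pcm = new byte[info.size - info.offset]; 
buffer.position(info.offset); 
buffer.limit(info.offset + info.size); 
buffer.get(pcm);                              // copy the decoded PCM out of the codec buffer 
buffer.clear();                               // reset position/limit before the codec reuses it 

audioTrack.write(pcm, 0, pcm.length);         // write the real payload size, not outputBuffers.length 
decoder.releaseOutputBuffer(outIndex, false); // false: nothing to render to a Surface for audio 

The second argument to releaseOutputBuffer() only matters when the codec is attached to a Surface, so false is the safer choice for audio-only decoding.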
