/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import com.android.cts.media.R;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.cts.util.MediaUtils;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaSync;
import android.media.MediaTimestamp;
import android.media.PlaybackParams;
import android.media.SyncParams;
import android.os.Handler;
import android.os.HandlerThread;
import android.test.ActivityInstrumentationTestCase2;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import java.io.IOException;
import java.lang.Long;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.List;
import java.util.LinkedList;

/**
 * Tests for the MediaSync API and local video/audio playback.
 *
 * <p>The file in res/raw used by all tests is (c) copyright 2008,
 * Blender Foundation / www.bigbuckbunny.org, and is licensed under the Creative Commons
 * Attribution 3.0 License at http://creativecommons.org/licenses/by/3.0/us/.
 */
public class MediaSyncTest extends ActivityInstrumentationTestCase2<MediaStubActivity> {
    private static final String LOG_TAG = "MediaSyncTest";

    private final long NO_TIMESTAMP = -1;
    private final float FLOAT_PLAYBACK_RATE_TOLERANCE = .02f;
    private final long TIME_MEASUREMENT_TOLERANCE_US = 20000;
    final int INPUT_RESOURCE_ID =
            R.raw.video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_192kbps_44100hz;
    private final int APPLICATION_AUDIO_PERIOD_MS = 200;
    private final int TEST_MAX_SPEED = 2;
    private static final float FLOAT_TOLERANCE = .00001f;

    private Context mContext;
    private Resources mResources;

    private MediaStubActivity mActivity;

    private MediaSync mMediaSync = null;
    private Surface mSurface = null;
    private AudioTrack mAudioTrack = null;

    private Decoder mDecoderVideo = null;
    private Decoder mDecoderAudio = null;
    private boolean mHasAudio = false;
    private boolean mHasVideo = false;
    private boolean mEosAudio = false;
    private boolean mEosVideo = false;
    private final Object mConditionEos = new Object();
    private final Object mConditionEosAudio = new Object();

    private int mNumBuffersReturned = 0;

    public MediaSyncTest() {
        super(MediaStubActivity.class);
    }
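
    // Typical MediaSync usage exercised by these tests:
    //  - Video: setSurface() is given the display surface, and the decoder renders into the
    //    surface returned by createInputSurface(); each output buffer is released with its
    //    presentation time in nanoseconds and MediaSync displays it at the right moment.
    //  - Audio: setAudioTrack() supplies the sink, decoded PCM buffers are handed over with
    //    queueAudio(), and they are recycled when onAudioBufferConsumed() fires.
    //  - Playback is started, paused, and rate-adjusted through setPlaybackParams().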

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mActivity = getActivity();
        getInstrumentation().waitForIdleSync();
        try {
            runTestOnUiThread(new Runnable() {
                public void run() {
                    mMediaSync = new MediaSync();
                }
            });
        } catch (Throwable e) {
            e.printStackTrace();
            fail();
        }
        mContext = getInstrumentation().getTargetContext();
        mResources = mContext.getResources();
        mDecoderVideo = new Decoder(this, mMediaSync, false);
        mDecoderAudio = new Decoder(this, mMediaSync, true);
    }

    @Override
    protected void tearDown() throws Exception {
        if (mMediaSync != null) {
            mMediaSync.release();
            mMediaSync = null;
        }
        if (mDecoderAudio != null) {
            mDecoderAudio.release();
            mDecoderAudio = null;
        }
        if (mDecoderVideo != null) {
            mDecoderVideo.release();
            mDecoderVideo = null;
        }
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
        mActivity = null;
        super.tearDown();
    }

    private boolean reachedEos_l() {
        return ((!mHasVideo || mEosVideo) && (!mHasAudio || mEosAudio));
    }

    public void onEos(Decoder decoder) {
        synchronized(mConditionEosAudio) {
            if (decoder == mDecoderAudio) {
                mEosAudio = true;
                mConditionEosAudio.notify();
            }
        }

        synchronized(mConditionEos) {
            if (decoder == mDecoderVideo) {
                mEosVideo = true;
            }
            if (reachedEos_l()) {
                mConditionEos.notify();
            }
        }
    }

    /**
     * Tests setPlaybackParams is handled correctly for a wrong rate.
     */
    public void testSetPlaybackParamsFail() throws InterruptedException {
        final float rate = -1.0f;
        try {
            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
            fail("playback rate " + rate + " is not handled correctly");
        } catch (IllegalArgumentException e) {
        }

        assertTrue("The stream in the test file cannot be decoded",
                mDecoderAudio.setup(INPUT_RESOURCE_ID, null, Long.MAX_VALUE));

        // get audio track.
        mAudioTrack = mDecoderAudio.getAudioTrack();

        mMediaSync.setAudioTrack(mAudioTrack);

        try {
            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
            fail("With audio track set, playback rate " + rate
                    + " is not handled correctly");
        } catch (IllegalArgumentException e) {
        }
    }

    /**
     * Tests setPlaybackParams is handled correctly for good rate without audio track set.
     * The case for good rate with audio track set is tested in testPlaybackRate*.
     */
    public void testSetPlaybackParamsSucceed() throws InterruptedException {
        final float rate = (float)TEST_MAX_SPEED;
        try {
            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
            PlaybackParams pbp = mMediaSync.getPlaybackParams();
            assertEquals(rate, pbp.getSpeed(), FLOAT_TOLERANCE);
        } catch (IllegalArgumentException e) {
            fail("playback rate " + rate + " is not handled correctly");
        }
    }

    /**
     * Tests returning audio buffers correctly.
     */
    public void testAudioBufferReturn() throws InterruptedException {
        final int timeOutMs = 10000;
        boolean completed = runCheckAudioBuffer(INPUT_RESOURCE_ID, timeOutMs);
        if (!completed) {
            throw new RuntimeException("timed out waiting for audio buffer return");
        }
    }

    private PlaybackParams PAUSED_RATE = new PlaybackParams().setSpeed(0.f);
    private PlaybackParams NORMAL_RATE = new PlaybackParams().setSpeed(1.f);

    private boolean runCheckAudioBuffer(int inputResourceId, int timeOutMs) {
        final int NUM_LOOPS = 10;
        final Object condition = new Object();

        mHasAudio = true;
        if (mDecoderAudio.setup(inputResourceId, null, Long.MAX_VALUE) == false) {
            return true;
        }

        // get audio track.
        mAudioTrack = mDecoderAudio.getAudioTrack();

        mMediaSync.setAudioTrack(mAudioTrack);

        mMediaSync.setCallback(new MediaSync.Callback() {
            @Override
            public void onAudioBufferConsumed(
                    MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
                Decoder decoderAudio = mDecoderAudio;
                if (decoderAudio != null) {
                    decoderAudio.checkReturnedAudioBuffer(byteBuffer, bufferIndex);
                    decoderAudio.releaseOutputBuffer(bufferIndex, NO_TIMESTAMP);
                    synchronized (condition) {
                        ++mNumBuffersReturned;
                        if (mNumBuffersReturned >= NUM_LOOPS) {
                            condition.notify();
                        }
                    }
                }
            }
        }, null);

        mMediaSync.setPlaybackParams(NORMAL_RATE);

        synchronized (condition) {
            mDecoderAudio.start();

            try {
                condition.wait(timeOutMs);
            } catch (InterruptedException e) {
            }
            return (mNumBuffersReturned >= NUM_LOOPS);
        }
    }
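
    // The flush test below relies on MediaSync dropping audio buffers that were queued before
    // flush(): one buffer is queued while playback is paused, flush() is called, a second
    // buffer is queued, and playback is resumed. Only the post-flush buffer
    // (INDEX_AFTER_FLUSH) is expected to come back through onAudioBufferConsumed().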

    /**
     * Tests flush.
     */
    public void testFlush() throws InterruptedException {
        final int timeOutMs = 5000;
        boolean completed = runFlush(INPUT_RESOURCE_ID, timeOutMs);
        if (!completed) {
            throw new RuntimeException("timed out waiting for flush");
        }
    }

    private boolean runFlush(int inputResourceId, int timeOutMs) {
        final int INDEX_BEFORE_FLUSH = 1;
        final int INDEX_AFTER_FLUSH = 2;
        final int BUFFER_SIZE = 1024;
        final int[] returnedIndex = new int[1];
        final Object condition = new Object();

        returnedIndex[0] = -1;

        mHasAudio = true;
        if (mDecoderAudio.setup(inputResourceId, null, Long.MAX_VALUE) == false) {
            return true;
        }

        // get audio track.
        mAudioTrack = mDecoderAudio.getAudioTrack();

        mMediaSync.setAudioTrack(mAudioTrack);

        mMediaSync.setCallback(new MediaSync.Callback() {
            @Override
            public void onAudioBufferConsumed(
                    MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
                synchronized (condition) {
                    if (returnedIndex[0] == -1) {
                        returnedIndex[0] = bufferIndex;
                        condition.notify();
                    }
                }
            }
        }, null);

        mMediaSync.setOnErrorListener(new MediaSync.OnErrorListener() {
            @Override
            public void onError(MediaSync sync, int what, int extra) {
                fail("got error from media sync (" + what + ", " + extra + ")");
            }
        }, null);

        mMediaSync.setPlaybackParams(PAUSED_RATE);

        ByteBuffer buffer1 = ByteBuffer.allocate(BUFFER_SIZE);
        ByteBuffer buffer2 = ByteBuffer.allocate(BUFFER_SIZE);
        mMediaSync.queueAudio(buffer1, INDEX_BEFORE_FLUSH, 0 /* presentationTimeUs */);
        mMediaSync.flush();
        mMediaSync.queueAudio(buffer2, INDEX_AFTER_FLUSH, 0 /* presentationTimeUs */);

        synchronized (condition) {
            mMediaSync.setPlaybackParams(NORMAL_RATE);

            try {
                condition.wait(timeOutMs);
            } catch (InterruptedException e) {
            }
            return (returnedIndex[0] == INDEX_AFTER_FLUSH);
        }
    }

    /**
     * Tests playing back video successfully.
     */
    public void testPlayVideo() throws InterruptedException {
        playAV(INPUT_RESOURCE_ID, 5000 /* lastBufferTimestampMs */,
                false /* audio */, true /* video */, 10000 /* timeOutMs */);
    }

    /**
     * Tests playing back audio successfully.
     */
    public void testPlayAudio() throws InterruptedException {
        playAV(INPUT_RESOURCE_ID, 5000 /* lastBufferTimestampMs */,
                true /* audio */, false /* video */, 10000 /* timeOutMs */);
    }

    /**
     * Tests playing back audio and video successfully.
     */
    public void testPlayAudioAndVideo() throws InterruptedException {
        playAV(INPUT_RESOURCE_ID, 5000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */);
    }
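
    // The rate tests below clip the input at lastBufferTimestampMs and play it back at a fixed
    // speed, so the expected wall-clock duration is roughly content length divided by rate
    // (e.g. 2000 ms of content at 0.25x takes about 8 seconds), which keeps each run within
    // the 10-second timeout. The actual rate verification happens in runPlayAV().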

    /**
     * Tests playing at specified playback rate successfully.
     */
    public void testPlaybackRateQuarter() throws InterruptedException {
        playAV(INPUT_RESOURCE_ID, 2000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */,
                0.25f /* playbackRate */);
    }

    public void testPlaybackRateHalf() throws InterruptedException {
        playAV(INPUT_RESOURCE_ID, 4000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */,
                0.5f /* playbackRate */);
    }

    public void testPlaybackRateDouble() throws InterruptedException {
        playAV(INPUT_RESOURCE_ID, 8000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */,
                (float)TEST_MAX_SPEED /* playbackRate */);
    }

    private void playAV(
            final int inputResourceId,
            final long lastBufferTimestampMs,
            final boolean audio,
            final boolean video,
            int timeOutMs) throws InterruptedException {
        playAV(inputResourceId, lastBufferTimestampMs, audio, video, timeOutMs, 1.0f);
    }

    private void playAV(
            final int inputResourceId,
            final long lastBufferTimestampMs,
            final boolean audio,
            final boolean video,
            int timeOutMs,
            final float playbackRate) throws InterruptedException {
        final AtomicBoolean completed = new AtomicBoolean();
        Thread decodingThread = new Thread(new Runnable() {
            @Override
            public void run() {
                completed.set(runPlayAV(inputResourceId, lastBufferTimestampMs * 1000,
                        audio, video, playbackRate));
            }
        });
        decodingThread.start();
        decodingThread.join(timeOutMs);
        if (!completed.get()) {
            throw new RuntimeException("timed out decoding to end-of-stream");
        }
    }

    private boolean runPlayAV(
            int inputResourceId,
            long lastBufferTimestampUs,
            boolean audio,
            boolean video,
            float playbackRate) {
        // allow 250ms for playback to get to stable state.
        final int PLAYBACK_RAMP_UP_TIME_MS = 250;

        final Object conditionFirstAudioBuffer = new Object();

        if (video) {
            mMediaSync.setSurface(mActivity.getSurfaceHolder().getSurface());
            mSurface = mMediaSync.createInputSurface();

            if (mDecoderVideo.setup(
                    inputResourceId, mSurface, lastBufferTimestampUs) == false) {
                return true;
            }
            mHasVideo = true;
        }

        if (audio) {
            if (mDecoderAudio.setup(inputResourceId, null, lastBufferTimestampUs) == false) {
                return true;
            }

            // get audio track.
            mAudioTrack = mDecoderAudio.getAudioTrack();

            mMediaSync.setAudioTrack(mAudioTrack);

            mMediaSync.setCallback(new MediaSync.Callback() {
                @Override
                public void onAudioBufferConsumed(
                        MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
                    Decoder decoderAudio = mDecoderAudio;
                    if (decoderAudio != null) {
                        decoderAudio.releaseOutputBuffer(bufferIndex, NO_TIMESTAMP);
                    }
                    synchronized (conditionFirstAudioBuffer) {
                        conditionFirstAudioBuffer.notify();
                    }
                }
            }, null);

            mHasAudio = true;
        }

        SyncParams sync = new SyncParams().allowDefaults();
        mMediaSync.setSyncParams(sync);
        sync = mMediaSync.getSyncParams();

        mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));

        synchronized (conditionFirstAudioBuffer) {
            if (video) {
                mDecoderVideo.start();
            }
            if (audio) {
                mDecoderAudio.start();

                // wait for the first audio output buffer returned by media sync.
                try {
                    conditionFirstAudioBuffer.wait();
                } catch (InterruptedException e) {
                    Log.i(LOG_TAG, "worker thread is interrupted.");
                    return true;
                }
            }
        }

        if (audio) {
            try {
                Thread.sleep(PLAYBACK_RAMP_UP_TIME_MS);
            } catch (InterruptedException e) {
                Log.i(LOG_TAG, "worker thread is interrupted during sleeping.");
                return true;
            }

            MediaTimestamp mediaTimestamp = mMediaSync.getTimestamp();
            assertTrue("No timestamp available for starting", mediaTimestamp != null);
            long checkStartTimeRealUs = System.nanoTime() / 1000;
            long checkStartTimeMediaUs = mediaTimestamp.mediaTimeUs;

            synchronized (mConditionEosAudio) {
                if (!mEosAudio) {
                    try {
                        mConditionEosAudio.wait();
                    } catch (InterruptedException e) {
                        Log.i(LOG_TAG, "worker thread is interrupted when waiting for audio EOS.");
                        return true;
                    }
                }
            }
            mediaTimestamp = mMediaSync.getTimestamp();
            assertTrue("No timestamp available for ending", mediaTimestamp != null);
            long playTimeUs = System.nanoTime() / 1000 - checkStartTimeRealUs;
            long mediaDurationUs = mediaTimestamp.mediaTimeUs - checkStartTimeMediaUs;
            assertEquals("MediaSync had error in playback rate " + playbackRate
                    + ", play time is " + playTimeUs + " vs expected " + mediaDurationUs,
                    mediaDurationUs,
                    playTimeUs * playbackRate,
                    // sync.getTolerance() is MediaSync's tolerance of the playback rate, whereas
                    // FLOAT_PLAYBACK_RATE_TOLERANCE is our test's tolerance.
                    // We need to add both to get an upper bound for the allowable error.
                    mediaDurationUs * (sync.getTolerance() + FLOAT_PLAYBACK_RATE_TOLERANCE)
                            + TIME_MEASUREMENT_TOLERANCE_US);
        }

        boolean completed = false;
        synchronized (mConditionEos) {
            if (!reachedEos_l()) {
                try {
                    mConditionEos.wait();
                } catch (InterruptedException e) {
                }
            }
            completed = reachedEos_l();
        }
        return completed;
    }
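
    // The rate check above, spelled out: between the two getTimestamp() samples, media time
    // should advance at roughly playbackRate times real time, i.e.
    //     mediaDurationUs ~= playTimeUs * playbackRate
    // The allowed error combines the platform's own rate tolerance (SyncParams.getTolerance()),
    // this test's rate tolerance (FLOAT_PLAYBACK_RATE_TOLERANCE), and a fixed
    // TIME_MEASUREMENT_TOLERANCE_US slack for the time measurements themselves.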

    private class Decoder extends MediaCodec.Callback {
        private final int NO_SAMPLE_RATE = -1;
        private final int NO_BUFFER_INDEX = -1;

        private MediaSyncTest mMediaSyncTest = null;
        private MediaSync mMediaSync = null;
        private boolean mIsAudio = false;
        private long mLastBufferTimestampUs = 0;

        private Surface mSurface = null;

        private AudioTrack mAudioTrack = null;

        private final Object mConditionCallback = new Object();
        private MediaExtractor mExtractor = null;
        private MediaCodec mDecoder = null;

        private final Object mAudioBufferLock = new Object();
        private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>();

        // accessed only on callback thread.
        private boolean mEos = false;
        private boolean mSignaledEos = false;

        private class AudioBuffer {
            public ByteBuffer mByteBuffer;
            public int mBufferIndex;

            public AudioBuffer(ByteBuffer byteBuffer, int bufferIndex) {
                mByteBuffer = byteBuffer;
                mBufferIndex = bufferIndex;
            }
        }

        private HandlerThread mHandlerThread;
        private Handler mHandler;

        Decoder(MediaSyncTest test, MediaSync sync, boolean isAudio) {
            mMediaSyncTest = test;
            mMediaSync = sync;
            mIsAudio = isAudio;
        }

        public boolean setup(int inputResourceId, Surface surface, long lastBufferTimestampUs) {
            if (!mIsAudio) {
                mSurface = surface;
                // handle video callback in a separate thread as releaseOutputBuffer is blocking
                mHandlerThread = new HandlerThread("SyncViewVidDec");
                mHandlerThread.start();
                mHandler = new Handler(mHandlerThread.getLooper());
            }
            mLastBufferTimestampUs = lastBufferTimestampUs;
            try {
                // get extractor.
                String type = mIsAudio ? "audio/" : "video/";
                mExtractor = MediaUtils.createMediaExtractorForMimeType(
                        mContext, inputResourceId, type);

                // get decoder.
                MediaFormat mediaFormat =
                        mExtractor.getTrackFormat(mExtractor.getSampleTrackIndex());
                String mimeType = mediaFormat.getString(MediaFormat.KEY_MIME);
                if (!MediaUtils.hasDecoder(mimeType)) {
                    Log.i(LOG_TAG, "No decoder found for mimeType=" + mimeType);
                    return false;
                }
                mDecoder = MediaCodec.createDecoderByType(mimeType);
                mDecoder.configure(mediaFormat, mSurface, null, 0);
                mDecoder.setCallback(this, mHandler);

                return true;
            } catch (IOException e) {
                throw new RuntimeException("error reading input resource", e);
            }
        }

        public void start() {
            if (mDecoder != null) {
                mDecoder.start();
            }
        }

        public void release() {
            synchronized (mConditionCallback) {
                if (mDecoder != null) {
                    try {
                        mDecoder.stop();
                    } catch (IllegalStateException e) {
                    }
                    mDecoder.release();
                    mDecoder = null;
                }
                if (mExtractor != null) {
                    mExtractor.release();
                    mExtractor = null;
                }
            }

            if (mAudioTrack != null) {
                mAudioTrack.release();
                mAudioTrack = null;
            }
        }

        public AudioTrack getAudioTrack() {
            if (!mIsAudio) {
                throw new RuntimeException("cannot create audio track for video");
            }

            if (mExtractor == null) {
                throw new RuntimeException("extractor is null");
            }

            if (mAudioTrack == null) {
                MediaFormat mediaFormat =
                        mExtractor.getTrackFormat(mExtractor.getSampleTrackIndex());
                int sampleRateInHz = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelConfig = (mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT) == 1 ?
                        AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
                int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
                int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
                        sampleRateInHz,
                        channelConfig,
                        audioFormat);
                final int frameCount = APPLICATION_AUDIO_PERIOD_MS * sampleRateInHz / 1000;
                final int frameSizeInBytes = Integer.bitCount(channelConfig)
                        * AudioFormat.getBytesPerSample(audioFormat);
                // ensure we consider application requirements for writing audio data
                minBufferSizeInBytes = TEST_MAX_SPEED /* speed influences buffer size */
                        * Math.max(minBufferSizeInBytes, frameCount * frameSizeInBytes);
                mAudioTrack = new AudioTrack(
                        AudioManager.STREAM_MUSIC,
                        sampleRateInHz,
                        channelConfig,
                        audioFormat,
                        minBufferSizeInBytes,
                        AudioTrack.MODE_STREAM);
            }

            return mAudioTrack;
        }
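
        // Note on the sizing above: Integer.bitCount(channelConfig) counts the bits of the
        // channel mask, i.e. the channel count (1 for CHANNEL_OUT_MONO, 2 for
        // CHANNEL_OUT_STEREO). The buffer is scaled by TEST_MAX_SPEED because, at faster
        // playback rates, audio data is written to the track correspondingly faster, so the
        // largest rate used by these tests needs the extra headroom.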

        public void releaseOutputBuffer(int bufferIndex, long renderTimestampNs) {
            synchronized (mConditionCallback) {
                if (mDecoder != null) {
                    if (renderTimestampNs == NO_TIMESTAMP) {
                        mDecoder.releaseOutputBuffer(bufferIndex, false /* render */);
                    } else {
                        mDecoder.releaseOutputBuffer(bufferIndex, renderTimestampNs);
                    }
                }
            }
        }

        @Override
        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        }

        @Override
        public void onInputBufferAvailable(MediaCodec codec, int index) {
            synchronized (mConditionCallback) {
                if (mExtractor == null || mExtractor.getSampleTrackIndex() == -1
                        || mSignaledEos || mDecoder != codec) {
                    return;
                }

                ByteBuffer buffer = codec.getInputBuffer(index);
                int size = mExtractor.readSampleData(buffer, 0);
                long timestampUs = mExtractor.getSampleTime();
                mExtractor.advance();
                mSignaledEos = mExtractor.getSampleTrackIndex() == -1
                        || timestampUs >= mLastBufferTimestampUs;
                codec.queueInputBuffer(
                        index,
                        0,
                        size,
                        timestampUs,
                        mSignaledEos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            }
        }

        @Override
        public void onOutputBufferAvailable(
                MediaCodec codec, int index, MediaCodec.BufferInfo info) {
            synchronized (mConditionCallback) {
                if (mEos || mDecoder != codec) {
                    return;
                }

                mEos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;

                if (info.size > 0) {
                    if (mIsAudio) {
                        ByteBuffer outputByteBuffer = codec.getOutputBuffer(index);
                        synchronized(mAudioBufferLock) {
                            mAudioBuffers.add(new AudioBuffer(outputByteBuffer, index));
                        }
                        mMediaSync.queueAudio(
                                outputByteBuffer,
                                index,
                                info.presentationTimeUs);
                    } else {
                        codec.releaseOutputBuffer(index, info.presentationTimeUs * 1000);
                    }
                } else {
                    codec.releaseOutputBuffer(index, false);
                }
            }

            if (mEos) {
                mMediaSyncTest.onEos(this);
            }
        }

        @Override
        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
        }

        public void checkReturnedAudioBuffer(ByteBuffer byteBuffer, int bufferIndex) {
            synchronized(mAudioBufferLock) {
                AudioBuffer audioBuffer = mAudioBuffers.get(0);
                if (audioBuffer.mByteBuffer != byteBuffer
                        || audioBuffer.mBufferIndex != bufferIndex) {
                    fail("returned buffer doesn't match what's sent");
                }
                mAudioBuffers.remove(0);
            }
        }
    }
}