1 /* 2 * Copyright 2015 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 package android.media.cts; 17 18 import android.content.Context; 19 import android.content.pm.PackageManager; 20 import android.media.AudioFormat; 21 import android.media.AudioManager; 22 import android.media.AudioTrack; 23 import android.media.MediaCodec; 24 import android.media.MediaExtractor; 25 import android.media.MediaFormat; 26 import android.media.MediaSync; 27 import android.media.MediaTimestamp; 28 import android.media.PlaybackParams; 29 import android.media.SyncParams; 30 import android.os.Handler; 31 import android.os.HandlerThread; 32 import android.platform.test.annotations.AppModeFull; 33 import android.platform.test.annotations.RequiresDevice; 34 import android.test.ActivityInstrumentationTestCase2; 35 import android.util.Log; 36 import android.view.Surface; 37 38 import androidx.test.filters.SmallTest; 39 40 import com.android.compatibility.common.util.MediaUtils; 41 42 import java.io.IOException; 43 import java.lang.Long; 44 import java.lang.Math; 45 import java.nio.ByteBuffer; 46 import java.util.LinkedList; 47 import java.util.List; 48 import java.util.concurrent.atomic.AtomicBoolean; 49 50 /** 51 * Tests for the MediaSync API and local video/audio playback. 52 * 53 * <p>The file in res/raw used by all tests are (c) copyright 2008, 54 * Blender Foundation / www.bigbuckbunny.org, and are licensed under the Creative Commons 55 * Attribution 3.0 License at http://creativecommons.org/licenses/by/3.0/us/. 56 */ 57 @NonMediaMainlineTest 58 @SmallTest 59 @RequiresDevice 60 @AppModeFull(reason = "TODO: evaluate and port to instant") 61 public class MediaSyncTest extends ActivityInstrumentationTestCase2<MediaStubActivity> { 62 private static final String LOG_TAG = "MediaSyncTest"; 63 64 static final String mInpPrefix = WorkDir.getMediaDirString(); 65 private final long NO_TIMESTAMP = -1; 66 private final float FLOAT_PLAYBACK_RATE_TOLERANCE = .02f; 67 private final long TIME_MEASUREMENT_TOLERANCE_US = 20000; 68 final String INPUT_RESOURCE = 69 mInpPrefix + "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_192kbps_44100hz.mp4"; 70 private final int APPLICATION_AUDIO_PERIOD_MS = 200; 71 private final int TEST_MAX_SPEED = 2; 72 private static final float FLOAT_TOLERANCE = .00001f; 73 74 private Context mContext; 75 76 private MediaStubActivity mActivity; 77 78 private MediaSync mMediaSync = null; 79 private Surface mSurface = null; 80 81 private Decoder mDecoderVideo = null; 82 private Decoder mDecoderAudio = null; 83 private boolean mHasAudio = false; 84 private boolean mHasVideo = false; 85 private boolean mEosAudio = false; 86 private boolean mEosVideo = false; 87 private int mTaggedAudioBufferIndex = -1; 88 private final Object mConditionEos = new Object(); 89 private final Object mConditionEosAudio = new Object(); 90 private final Object mConditionTaggedAudioBufferIndex = new Object(); 91 92 private int mNumBuffersReturned = 0; 93 MediaSyncTest()94 public MediaSyncTest() { 95 super(MediaStubActivity.class); 96 } 97 98 @Override setUp()99 protected void setUp() throws Exception { 100 super.setUp(); 101 mActivity = getActivity(); 102 getInstrumentation().waitForIdleSync(); 103 try { 104 runTestOnUiThread(new Runnable() { 105 public void run() { 106 mMediaSync = new MediaSync(); 107 } 108 }); 109 } catch (Throwable e) { 110 e.printStackTrace(); 111 fail(); 112 } 113 mContext = getInstrumentation().getTargetContext(); 114 mDecoderVideo = new Decoder(this, mMediaSync, false); 115 mDecoderAudio = new Decoder(this, mMediaSync, true); 116 } 117 118 @Override tearDown()119 protected void tearDown() throws Exception { 120 if (mMediaSync != null) { 121 mMediaSync.release(); 122 mMediaSync = null; 123 } 124 if (mDecoderAudio != null) { 125 mDecoderAudio.release(); 126 mDecoderAudio = null; 127 } 128 if (mDecoderVideo != null) { 129 mDecoderVideo.release(); 130 mDecoderVideo = null; 131 } 132 if (mSurface != null) { 133 mSurface.release(); 134 mSurface = null; 135 } 136 mActivity = null; 137 mHasAudio = false; 138 mHasVideo = false; 139 mEosAudio = false; 140 mEosVideo = false; 141 mTaggedAudioBufferIndex = -1; 142 super.tearDown(); 143 } 144 reachedEos_l()145 private boolean reachedEos_l() { 146 return ((!mHasVideo || mEosVideo) && (!mHasAudio || mEosAudio)); 147 } 148 onTaggedAudioBufferIndex(Decoder decoder, int index)149 public void onTaggedAudioBufferIndex(Decoder decoder, int index) { 150 synchronized (mConditionTaggedAudioBufferIndex) { 151 if (decoder == mDecoderAudio) { 152 mTaggedAudioBufferIndex = index; 153 } 154 } 155 } 156 onEos(Decoder decoder)157 public void onEos(Decoder decoder) { 158 synchronized (mConditionEosAudio) { 159 if (decoder == mDecoderAudio) { 160 mEosAudio = true; 161 mConditionEosAudio.notify(); 162 } 163 } 164 165 synchronized (mConditionEos) { 166 if (decoder == mDecoderVideo) { 167 mEosVideo = true; 168 } 169 if (reachedEos_l()) { 170 mConditionEos.notify(); 171 } 172 } 173 } 174 hasAudioOutput()175 private boolean hasAudioOutput() { 176 return mActivity.getPackageManager() 177 .hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT); 178 } 179 180 /** 181 * Tests setPlaybackParams is handled correctly for wrong rate. 182 */ testSetPlaybackParamsFail()183 public void testSetPlaybackParamsFail() throws InterruptedException { 184 final float rate = -1.0f; 185 try { 186 mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate)); 187 fail("playback rate " + rate + " is not handled correctly"); 188 } catch (IllegalArgumentException e) { 189 } 190 191 assertTrue("The stream in test file can not be decoded", 192 mDecoderAudio.setup(INPUT_RESOURCE, null, Long.MAX_VALUE, NO_TIMESTAMP)); 193 194 // get audio track. 195 mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack()); 196 197 try { 198 mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate)); 199 fail("With audio track set, playback rate " + rate 200 + " is not handled correctly"); 201 } catch (IllegalArgumentException e) { 202 } 203 } 204 205 /** 206 * Tests setPlaybackParams is handled correctly for good rate without audio track set. 207 * The case for good rate with audio track set is tested in testPlaybackRate*. 208 */ testSetPlaybackParamsSucceed()209 public void testSetPlaybackParamsSucceed() throws InterruptedException { 210 final float rate = (float)TEST_MAX_SPEED; 211 try { 212 mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate)); 213 PlaybackParams pbp = mMediaSync.getPlaybackParams(); 214 assertEquals(rate, pbp.getSpeed(), FLOAT_TOLERANCE); 215 } catch (IllegalArgumentException e) { 216 fail("playback rate " + rate + " is not handled correctly"); 217 } 218 } 219 220 /** 221 * Tests returning audio buffers correctly. 222 */ testAudioBufferReturn()223 public void testAudioBufferReturn() throws InterruptedException { 224 final int timeOutMs = 10000; 225 boolean completed = runCheckAudioBuffer(INPUT_RESOURCE, timeOutMs); 226 if (!completed) { 227 throw new RuntimeException("timed out waiting for audio buffer return"); 228 } 229 } 230 231 private PlaybackParams PAUSED_RATE = new PlaybackParams().setSpeed(0.f); 232 private PlaybackParams NORMAL_RATE = new PlaybackParams().setSpeed(1.f); 233 runCheckAudioBuffer(String inputResource, int timeOutMs)234 private boolean runCheckAudioBuffer(String inputResource, int timeOutMs) { 235 final int NUM_LOOPS = 10; 236 final Object condition = new Object(); 237 238 mHasAudio = true; 239 if (mDecoderAudio.setup(inputResource, null, Long.MAX_VALUE, NO_TIMESTAMP) == false) { 240 return true; 241 } 242 243 // get audio track. 244 mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack()); 245 246 mMediaSync.setCallback(new MediaSync.Callback() { 247 @Override 248 public void onAudioBufferConsumed( 249 MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) { 250 Decoder decoderAudio = mDecoderAudio; 251 if (decoderAudio != null) { 252 decoderAudio.checkReturnedAudioBuffer(byteBuffer, bufferIndex); 253 decoderAudio.releaseOutputBuffer(bufferIndex, NO_TIMESTAMP); 254 synchronized (condition) { 255 ++mNumBuffersReturned; 256 if (mNumBuffersReturned >= NUM_LOOPS) { 257 condition.notify(); 258 } 259 } 260 } 261 } 262 }, null); 263 264 mMediaSync.setPlaybackParams(NORMAL_RATE); 265 266 synchronized (condition) { 267 mDecoderAudio.start(); 268 269 try { 270 condition.wait(timeOutMs); 271 } catch (InterruptedException e) { 272 } 273 return (mNumBuffersReturned >= NUM_LOOPS); 274 } 275 } 276 277 /** 278 * Tests flush. 279 */ testFlush()280 public void testFlush() throws InterruptedException { 281 final int timeOutMs = 5000; 282 boolean completed = runFlush(INPUT_RESOURCE, timeOutMs); 283 if (!completed) { 284 throw new RuntimeException("timed out waiting for flush"); 285 } 286 } 287 runFlush(String inputResource, int timeOutMs)288 private boolean runFlush(String inputResource, int timeOutMs) { 289 final int INDEX_BEFORE_FLUSH = 1; 290 final int INDEX_AFTER_FLUSH = 2; 291 final int BUFFER_SIZE = 1024; 292 final int[] returnedIndex = new int[1]; 293 final Object condition = new Object(); 294 295 returnedIndex[0] = -1; 296 297 mHasAudio = true; 298 if (mDecoderAudio.setup(inputResource, null, Long.MAX_VALUE, NO_TIMESTAMP) == false) { 299 return true; 300 } 301 302 // get audio track. 303 mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack()); 304 305 mMediaSync.setCallback(new MediaSync.Callback() { 306 @Override 307 public void onAudioBufferConsumed( 308 MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) { 309 synchronized (condition) { 310 if (returnedIndex[0] == -1) { 311 returnedIndex[0] = bufferIndex; 312 condition.notify(); 313 } 314 } 315 } 316 }, null); 317 318 mMediaSync.setOnErrorListener(new MediaSync.OnErrorListener() { 319 @Override 320 public void onError(MediaSync sync, int what, int extra) { 321 fail("got error from media sync (" + what + ", " + extra + ")"); 322 } 323 }, null); 324 325 mMediaSync.setPlaybackParams(PAUSED_RATE); 326 327 ByteBuffer buffer1 = ByteBuffer.allocate(BUFFER_SIZE); 328 ByteBuffer buffer2 = ByteBuffer.allocate(BUFFER_SIZE); 329 mMediaSync.queueAudio(buffer1, INDEX_BEFORE_FLUSH, 0 /* presentationTimeUs */); 330 mMediaSync.flush(); 331 mMediaSync.queueAudio(buffer2, INDEX_AFTER_FLUSH, 0 /* presentationTimeUs */); 332 333 synchronized (condition) { 334 mMediaSync.setPlaybackParams(NORMAL_RATE); 335 336 try { 337 condition.wait(timeOutMs); 338 } catch (InterruptedException e) { 339 } 340 return (returnedIndex[0] == INDEX_AFTER_FLUSH); 341 } 342 } 343 344 /** 345 * Tests playing back audio successfully. 346 */ testPlayVideo()347 public void testPlayVideo() throws Exception { 348 playAV(INPUT_RESOURCE, 5000 /* lastBufferTimestampMs */, 349 false /* audio */, true /* video */, 10000 /* timeOutMs */); 350 } 351 352 /** 353 * Tests playing back video successfully. 354 */ testPlayAudio()355 public void testPlayAudio() throws Exception { 356 if (!hasAudioOutput()) { 357 Log.w(LOG_TAG,"AUDIO_OUTPUT feature not found. This system might not have a valid " 358 + "audio output HAL"); 359 return; 360 } 361 362 playAV(INPUT_RESOURCE, 5000 /* lastBufferTimestampMs */, 363 true /* audio */, false /* video */, 10000 /* timeOutMs */); 364 } 365 366 /** 367 * Tests playing back audio and video successfully. 368 */ testPlayAudioAndVideo()369 public void testPlayAudioAndVideo() throws Exception { 370 playAV(INPUT_RESOURCE, 5000 /* lastBufferTimestampMs */, 371 true /* audio */, true /* video */, 10000 /* timeOutMs */); 372 } 373 374 /** 375 * Tests playing at specified playback rate successfully. 376 */ testPlaybackRateQuarter()377 public void testPlaybackRateQuarter() throws Exception { 378 playAV(INPUT_RESOURCE, 2000 /* lastBufferTimestampMs */, 379 true /* audio */, true /* video */, 10000 /* timeOutMs */, 380 0.25f /* playbackRate */); 381 } testPlaybackRateHalf()382 public void testPlaybackRateHalf() throws Exception { 383 playAV(INPUT_RESOURCE, 4000 /* lastBufferTimestampMs */, 384 true /* audio */, true /* video */, 10000 /* timeOutMs */, 385 0.5f /* playbackRate */); 386 } testPlaybackRateDouble()387 public void testPlaybackRateDouble() throws Exception { 388 playAV(INPUT_RESOURCE, 8000 /* lastBufferTimestampMs */, 389 true /* audio */, true /* video */, 10000 /* timeOutMs */, 390 (float)TEST_MAX_SPEED /* playbackRate */); 391 } 392 playAV( final String inputResource, final long lastBufferTimestampMs, final boolean audio, final boolean video, int timeOutMs)393 private void playAV( 394 final String inputResource, 395 final long lastBufferTimestampMs, 396 final boolean audio, 397 final boolean video, 398 int timeOutMs) throws Exception { 399 playAV(inputResource, lastBufferTimestampMs, audio, video, timeOutMs, 1.0f); 400 } 401 402 private class PlayAVState { 403 boolean mTimeValid; 404 long mMediaDurationUs; 405 long mClockDurationUs; 406 float mSyncTolerance; 407 }; 408 playAV( final String inputResource, final long lastBufferTimestampMs, final boolean audio, final boolean video, int timeOutMs, final float playbackRate)409 private void playAV( 410 final String inputResource, 411 final long lastBufferTimestampMs, 412 final boolean audio, 413 final boolean video, 414 int timeOutMs, 415 final float playbackRate) throws Exception { 416 final int limit = 5; 417 String info = ""; 418 Preconditions.assertTestFileExists(inputResource); 419 for (int tries = 0; ; ++tries) { 420 // Run test 421 final AtomicBoolean completed = new AtomicBoolean(); 422 final PlayAVState state = new PlayAVState(); 423 Thread decodingThread = new Thread(new Runnable() { 424 @Override 425 public void run() { 426 completed.set(runPlayAV(inputResource, lastBufferTimestampMs * 1000, 427 audio, video, playbackRate, state)); 428 } 429 }); 430 decodingThread.start(); 431 decodingThread.join(timeOutMs); 432 assertTrue("timed out decoding to end-of-stream", completed.get()); 433 434 // Examine results 435 if (!state.mTimeValid) return; 436 437 // sync.getTolerance() is MediaSync's tolerance of the playback rate, whereas 438 // FLOAT_PLAYBACK_RATE_TOLERANCE is our test's tolerance. 439 // We need to add both to get an upperbound for allowable error. 440 final double tolerance = state.mMediaDurationUs 441 * (state.mSyncTolerance + FLOAT_PLAYBACK_RATE_TOLERANCE) 442 + TIME_MEASUREMENT_TOLERANCE_US; 443 final double diff = state.mMediaDurationUs - state.mClockDurationUs * playbackRate ; 444 info += "[" + tries 445 + "] playbackRate " + playbackRate 446 + ", clockDurationUs " + state.mClockDurationUs 447 + ", mediaDurationUs " + state.mMediaDurationUs 448 + ", diff " + diff 449 + ", tolerance " + tolerance + "\n"; 450 451 // Good enough? 452 if (Math.abs(diff) <= tolerance) { 453 Log.d(LOG_TAG, info); 454 return; 455 } 456 assertTrue("bad playback\n" + info, tries < limit); 457 458 Log.d(LOG_TAG, "Trying again\n" + info); 459 460 // Try again (may throw Exception) 461 tearDown(); 462 setUp(); 463 464 Thread.sleep(1000 /* millis */); 465 } 466 } 467 runPlayAV( String inputResource, long lastBufferTimestampUs, boolean audio, boolean video, float playbackRate, PlayAVState state)468 private boolean runPlayAV( 469 String inputResource, 470 long lastBufferTimestampUs, 471 boolean audio, 472 boolean video, 473 float playbackRate, 474 PlayAVState state) { 475 // allow 750ms for playback to get to stable state. 476 final int PLAYBACK_RAMP_UP_TIME_US = 750000; 477 478 Preconditions.assertTestFileExists(inputResource); 479 480 final Object conditionFirstAudioBuffer = new Object(); 481 482 if (video) { 483 mMediaSync.setSurface(mActivity.getSurfaceHolder().getSurface()); 484 mSurface = mMediaSync.createInputSurface(); 485 486 if (mDecoderVideo.setup( 487 inputResource, mSurface, lastBufferTimestampUs, NO_TIMESTAMP) == false) { 488 return true; 489 } 490 mHasVideo = true; 491 } 492 493 if (audio) { 494 if (mDecoderAudio.setup( 495 inputResource, null, lastBufferTimestampUs, 496 PLAYBACK_RAMP_UP_TIME_US) == false) { 497 return true; 498 } 499 500 // get audio track. 501 mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack()); 502 503 mMediaSync.setCallback(new MediaSync.Callback() { 504 @Override 505 public void onAudioBufferConsumed( 506 MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) { 507 Decoder decoderAudio = mDecoderAudio; 508 if (decoderAudio != null) { 509 decoderAudio.releaseOutputBuffer(bufferIndex, NO_TIMESTAMP); 510 } 511 synchronized (conditionFirstAudioBuffer) { 512 synchronized (mConditionTaggedAudioBufferIndex) { 513 if (mTaggedAudioBufferIndex >= 0 514 && mTaggedAudioBufferIndex == bufferIndex) { 515 conditionFirstAudioBuffer.notify(); 516 } 517 } 518 } 519 } 520 }, null); 521 522 mHasAudio = true; 523 } 524 525 SyncParams sync = new SyncParams().allowDefaults(); 526 mMediaSync.setSyncParams(sync); 527 sync = mMediaSync.getSyncParams(); 528 529 mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate)); 530 531 synchronized (conditionFirstAudioBuffer) { 532 if (video) { 533 mDecoderVideo.start(); 534 } 535 if (audio) { 536 mDecoderAudio.start(); 537 538 // wait for the first audio output buffer returned by media sync. 539 try { 540 conditionFirstAudioBuffer.wait(); 541 } catch (InterruptedException e) { 542 Log.i(LOG_TAG, "worker thread is interrupted."); 543 return true; 544 } 545 } 546 } 547 548 if (audio) { 549 MediaTimestamp mediaTimestamp = mMediaSync.getTimestamp(); 550 assertTrue("No timestamp available for starting", mediaTimestamp != null); 551 long checkStartTimeRealUs = System.nanoTime() / 1000; 552 long checkStartTimeMediaUs = mediaTimestamp.mediaTimeUs; 553 554 synchronized (mConditionEosAudio) { 555 if (!mEosAudio) { 556 try { 557 mConditionEosAudio.wait(); 558 } catch (InterruptedException e) { 559 Log.i(LOG_TAG, "worker thread is interrupted when waiting for audio EOS."); 560 return true; 561 } 562 } 563 } 564 mediaTimestamp = mMediaSync.getTimestamp(); 565 assertTrue("No timestamp available for ending", mediaTimestamp != null); 566 state.mTimeValid = true; 567 state.mClockDurationUs = System.nanoTime() / 1000 - checkStartTimeRealUs; 568 state.mMediaDurationUs = mediaTimestamp.mediaTimeUs - checkStartTimeMediaUs; 569 state.mSyncTolerance = sync.getTolerance(); 570 } 571 572 boolean completed = false; 573 synchronized (mConditionEos) { 574 if (!reachedEos_l()) { 575 try { 576 mConditionEos.wait(); 577 } catch (InterruptedException e) { 578 } 579 } 580 completed = reachedEos_l(); 581 } 582 return completed; 583 } 584 585 private class Decoder extends MediaCodec.Callback { 586 private final int NO_SAMPLE_RATE = -1; 587 private final int NO_BUFFER_INDEX = -1; 588 589 private MediaSyncTest mMediaSyncTest = null; 590 private MediaSync mMediaSync = null; 591 private boolean mIsAudio = false; 592 private long mLastBufferTimestampUs = 0; 593 private long mStartingAudioTimestampUs = NO_TIMESTAMP; 594 595 private Surface mSurface = null; 596 597 private AudioTrack mAudioTrack = null; 598 599 private final Object mConditionCallback = new Object(); 600 private MediaExtractor mExtractor = null; 601 private MediaCodec mDecoder = null; 602 603 private final Object mAudioBufferLock = new Object(); 604 private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>(); 605 606 // accessed only on callback thread. 607 private boolean mEos = false; 608 private boolean mSignaledEos = false; 609 610 private class AudioBuffer { 611 public ByteBuffer mByteBuffer; 612 public int mBufferIndex; 613 AudioBuffer(ByteBuffer byteBuffer, int bufferIndex)614 public AudioBuffer(ByteBuffer byteBuffer, int bufferIndex) { 615 mByteBuffer = byteBuffer; 616 mBufferIndex = bufferIndex; 617 } 618 } 619 620 private HandlerThread mHandlerThread; 621 private Handler mHandler; 622 Decoder(MediaSyncTest test, MediaSync sync, boolean isAudio)623 Decoder(MediaSyncTest test, MediaSync sync, boolean isAudio) { 624 mMediaSyncTest = test; 625 mMediaSync = sync; 626 mIsAudio = isAudio; 627 } 628 setup( String inputResource, Surface surface, long lastBufferTimestampUs, long startingAudioTimestampUs)629 public boolean setup( 630 String inputResource, Surface surface, long lastBufferTimestampUs, 631 long startingAudioTimestampUs) { 632 if (!mIsAudio) { 633 mSurface = surface; 634 // handle video callback in a separate thread as releaseOutputBuffer is blocking 635 mHandlerThread = new HandlerThread("SyncViewVidDec"); 636 mHandlerThread.start(); 637 mHandler = new Handler(mHandlerThread.getLooper()); 638 } 639 mLastBufferTimestampUs = lastBufferTimestampUs; 640 mStartingAudioTimestampUs = startingAudioTimestampUs; 641 try { 642 // get extrator. 643 String type = mIsAudio ? "audio/" : "video/"; 644 mExtractor = MediaUtils.createMediaExtractorForMimeType( 645 mContext, inputResource, type); 646 647 // get decoder. 648 MediaFormat mediaFormat = 649 mExtractor.getTrackFormat(mExtractor.getSampleTrackIndex()); 650 String mimeType = mediaFormat.getString(MediaFormat.KEY_MIME); 651 if (!MediaUtils.hasDecoder(mimeType)) { 652 Log.i(LOG_TAG, "No decoder found for mimeType= " + mimeType); 653 return false; 654 } 655 mDecoder = MediaCodec.createDecoderByType(mimeType); 656 mDecoder.configure(mediaFormat, mSurface, null, 0); 657 mDecoder.setCallback(this, mHandler); 658 659 return true; 660 } catch (IOException e) { 661 throw new RuntimeException("error reading input resource", e); 662 } 663 } 664 start()665 public void start() { 666 if (mDecoder != null) { 667 mDecoder.start(); 668 } 669 } 670 release()671 public void release() { 672 synchronized (mConditionCallback) { 673 if (mDecoder != null) { 674 try { 675 mDecoder.stop(); 676 } catch (IllegalStateException e) { 677 } 678 mDecoder.release(); 679 mDecoder = null; 680 } 681 if (mExtractor != null) { 682 mExtractor.release(); 683 mExtractor = null; 684 } 685 } 686 687 if (mAudioTrack != null) { 688 mAudioTrack.release(); 689 mAudioTrack = null; 690 } 691 } 692 getAudioTrack()693 public AudioTrack getAudioTrack() { 694 if (!mIsAudio) { 695 throw new RuntimeException("can not create audio track for video"); 696 } 697 698 if (mExtractor == null) { 699 throw new RuntimeException("extrator is null"); 700 } 701 702 if (mAudioTrack == null) { 703 MediaFormat mediaFormat = 704 mExtractor.getTrackFormat(mExtractor.getSampleTrackIndex()); 705 int sampleRateInHz = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); 706 int channelConfig = (mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT) == 1 ? 707 AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); 708 int audioFormat = AudioFormat.ENCODING_PCM_16BIT; 709 int minBufferSizeInBytes = AudioTrack.getMinBufferSize( 710 sampleRateInHz, 711 channelConfig, 712 audioFormat); 713 final int frameCount = APPLICATION_AUDIO_PERIOD_MS * sampleRateInHz / 1000; 714 final int frameSizeInBytes = Integer.bitCount(channelConfig) 715 * AudioFormat.getBytesPerSample(audioFormat); 716 // ensure we consider application requirements for writing audio data 717 minBufferSizeInBytes = TEST_MAX_SPEED /* speed influences buffer size */ 718 * Math.max(minBufferSizeInBytes, frameCount * frameSizeInBytes); 719 mAudioTrack = new AudioTrack( 720 AudioManager.STREAM_MUSIC, 721 sampleRateInHz, 722 channelConfig, 723 audioFormat, 724 minBufferSizeInBytes, 725 AudioTrack.MODE_STREAM); 726 } 727 728 return mAudioTrack; 729 } 730 releaseOutputBuffer(int bufferIndex, long renderTimestampNs)731 public void releaseOutputBuffer(int bufferIndex, long renderTimestampNs) { 732 synchronized (mConditionCallback) { 733 if (mDecoder != null) { 734 if (renderTimestampNs == NO_TIMESTAMP) { 735 mDecoder.releaseOutputBuffer(bufferIndex, false /* render */); 736 } else { 737 mDecoder.releaseOutputBuffer(bufferIndex, renderTimestampNs); 738 } 739 } 740 } 741 } 742 743 @Override onError(MediaCodec codec, MediaCodec.CodecException e)744 public void onError(MediaCodec codec, MediaCodec.CodecException e) { 745 } 746 747 @Override onInputBufferAvailable(MediaCodec codec, int index)748 public void onInputBufferAvailable(MediaCodec codec, int index) { 749 synchronized (mConditionCallback) { 750 if (mExtractor == null || mExtractor.getSampleTrackIndex() == -1 751 || mSignaledEos || mDecoder != codec) { 752 return; 753 } 754 755 ByteBuffer buffer = codec.getInputBuffer(index); 756 int size = mExtractor.readSampleData(buffer, 0); 757 long timestampUs = mExtractor.getSampleTime(); 758 mExtractor.advance(); 759 mSignaledEos = mExtractor.getSampleTrackIndex() == -1 760 || timestampUs >= mLastBufferTimestampUs; 761 codec.queueInputBuffer( 762 index, 763 0, 764 size, 765 timestampUs, 766 mSignaledEos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0); 767 } 768 } 769 770 @Override onOutputBufferAvailable( MediaCodec codec, int index, MediaCodec.BufferInfo info)771 public void onOutputBufferAvailable( 772 MediaCodec codec, int index, MediaCodec.BufferInfo info) { 773 synchronized (mConditionCallback) { 774 if (mEos || mDecoder != codec) { 775 return; 776 } 777 778 mEos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; 779 780 if (info.size > 0) { 781 if (mIsAudio) { 782 ByteBuffer outputByteBuffer = codec.getOutputBuffer(index); 783 synchronized (mAudioBufferLock) { 784 mAudioBuffers.add(new AudioBuffer(outputByteBuffer, index)); 785 } 786 mMediaSync.queueAudio( 787 outputByteBuffer, 788 index, 789 info.presentationTimeUs); 790 if (mStartingAudioTimestampUs >= 0 791 && info.presentationTimeUs >= mStartingAudioTimestampUs) { 792 mMediaSyncTest.onTaggedAudioBufferIndex(this, index); 793 mStartingAudioTimestampUs = NO_TIMESTAMP; 794 } 795 } else { 796 codec.releaseOutputBuffer(index, info.presentationTimeUs * 1000); 797 } 798 } else { 799 codec.releaseOutputBuffer(index, false); 800 } 801 } 802 803 if (mEos) { 804 mMediaSyncTest.onEos(this); 805 } 806 } 807 808 @Override onOutputFormatChanged(MediaCodec codec, MediaFormat format)809 public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 810 } 811 checkReturnedAudioBuffer(ByteBuffer byteBuffer, int bufferIndex)812 public void checkReturnedAudioBuffer(ByteBuffer byteBuffer, int bufferIndex) { 813 synchronized (mAudioBufferLock) { 814 AudioBuffer audioBuffer = mAudioBuffers.get(0); 815 if (audioBuffer.mByteBuffer != byteBuffer 816 || audioBuffer.mBufferIndex != bufferIndex) { 817 fail("returned buffer doesn't match what's sent"); 818 } 819 mAudioBuffers.remove(0); 820 } 821 } 822 } 823 } 824