/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import android.media.cts.R;

import static org.junit.Assert.assertNotNull;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.MediaUtils;

import android.annotation.TargetApi;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.res.AssetFileDescriptor;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.net.Uri;
import android.opengl.EGL14;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemClock;
import android.support.test.rule.ActivityTestRule;
import android.util.Log;
import android.util.Pair;
import android.util.SparseArray;
import android.view.PixelCopy;
import android.view.PixelCopy.OnPixelCopyFinishedListener;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.concurrent.TimeUnit;
import java.util.HashMap;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;

@TargetApi(16)
public class DecodeAccuracyTestBase {

    protected Context mContext;
    protected Resources mResources;
    protected DecodeAccuracyTestActivity mActivity;
    protected TestHelper testHelper;

    @Rule
    public ActivityTestRule<DecodeAccuracyTestActivity> mActivityRule =
            new ActivityTestRule<>(DecodeAccuracyTestActivity.class);
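    // ActivityTestRule launches DecodeAccuracyTestActivity before each test and finishes it
    // afterwards, so setUp() below can assume a live activity instance.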
    @Before
    public void setUp() throws Exception {
        mActivity = mActivityRule.getActivity();
        mContext = mActivity.getApplicationContext();
        mResources = mActivity.getResources();
        testHelper = new TestHelper(mContext, mActivity);
    }

    @After
    public void tearDown() throws Exception {
        mActivity = null;
        mResources = null;
        mContext = null;
        mActivityRule = null;
    }

    protected void bringActivityToFront() {
        Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
        mActivity.startActivity(intent);
    }

    protected TestHelper getHelper() {
        return testHelper;
    }

    public static <T> T checkNotNull(T reference) {
        assertNotNull(reference);
        return reference;
    }

    public static <T> T checkNotNull(String msg, T reference) {
        assertNotNull(msg, reference);
        return reference;
    }

    /* Simple player that decodes a local video file only. */
    @TargetApi(16)
    static class SimplePlayer {

        public static final long MIN_MS_PER_FRAME = TimeUnit.SECONDS.toMillis(1) / 5; // 5 FPS
        public static final long STARTUP_ALLOW_MS = TimeUnit.SECONDS.toMillis(1);
        public static final int END_OF_STREAM = -1;
        public static final int DEQUEUE_SUCCESS = 1;
        public static final int DEQUEUE_FAIL = 0;

        private static final String TAG = SimplePlayer.class.getSimpleName();
        private static final int NO_TRACK_INDEX = -3;
        // Timeout, in microseconds, passed to the MediaCodec dequeue calls.
        private static final long DEQUEUE_TIMEOUT_US = 20;

        private final Context context;
        private final MediaExtractor extractor;
        private final String codecName;
        private MediaCodec decoder;
        private byte[] outputBytes;
        private boolean renderToSurface;
        private MediaCodecList mediaCodecList;
        private Surface surface;

        public SimplePlayer(Context context) {
            this(context, null);
        }

        public SimplePlayer(Context context, String codecName) {
            this.context = checkNotNull(context);
            this.codecName = codecName;
            this.extractor = new MediaExtractor();
            this.renderToSurface = false;
            this.surface = null;
        }
        /**
         * Plays the given video file for a certain number of frames.
         *
         * @param surface is the surface to which decoder output is rendered.
         * @param videoFormat is the format of the video to extract and decode.
         * @param numOfTotalFrames is the number of frames to play.
         * @param msPerFrameCap caps the playback rate to at most one frame per
         *        {@code msPerFrameCap} milliseconds. No cap is applied if the value is less than 1.
         * @param releasePlayer whether to release the player when decoding is done.
         * @return a {@link PlayerResult} containing the result.
         */
        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap,
                boolean releasePlayer) {
            this.surface = surface;
            PlayerResult playerResult;
            if (prepareVideoDecode(videoFormat)) {
                if (startDecoder()) {
                    final long timeout =
                            Math.max(MIN_MS_PER_FRAME, msPerFrameCap) * numOfTotalFrames
                                    + STARTUP_ALLOW_MS;
                    playerResult = decodeFramesAndPlay(numOfTotalFrames, timeout, msPerFrameCap);
                } else {
                    playerResult = PlayerResult.failToStart();
                }
            } else {
                playerResult = new PlayerResult();
            }
            if (releasePlayer) {
                release();
            }
            return new PlayerResult(playerResult);
        }

        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
            return decodeVideoFrames(surface, videoFormat, numOfTotalFrames, 0, false);
        }
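        // Illustrative usage sketch (added for clarity; not part of the original test, and the
        // asset name below is hypothetical):
        //
        //   SimplePlayer player = new SimplePlayer(context);
        //   VideoFormat format = new VideoFormat("example-h264_480x360_30fps.mp4");
        //   PlayerResult result = player.decodeVideoFrames(surface, format, 30, 0, true);
        //   if (!result.isSuccess()) {
        //       Log.e(TAG, result.getFailureMessage());
        //   }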
        /**
         * Sets up the extractor and the video decoder with the proper format.
         * This must be called before starting the decoder.
         */
        private boolean prepareVideoDecode(VideoFormat videoFormat) {
            MediaFormat mediaFormat = prepareExtractor(videoFormat);
            if (mediaFormat == null) {
                return false;
            }
            configureVideoFormat(mediaFormat, videoFormat);
            setRenderToSurface(surface != null);
            return createDecoder(mediaFormat) && configureDecoder(surface, mediaFormat);
        }

        /**
         * Sets up the extractor and gets the {@link MediaFormat} of the track.
         */
        private MediaFormat prepareExtractor(VideoFormat videoFormat) {
            if (!setExtractorDataSource(videoFormat)) {
                return null;
            }
            final int trackNum = getFirstTrackIndexByType(videoFormat.getMediaFormat());
            if (trackNum == NO_TRACK_INDEX) {
                return null;
            }
            extractor.selectTrack(trackNum);
            return extractor.getTrackFormat(trackNum);
        }

        /**
         * Decodes video frames and displays them on the surface.
         *
         * @param numOfTotalFrames is the number of frames to be decoded.
         * @param timeOutMs is the time limit for decoding the frames.
         * @param msPerFrameCap caps the playback rate to at most one frame per
         *        {@code msPerFrameCap} milliseconds. No cap is applied if the value is less than 1.
         * @return a {@link PlayerResult} containing the result.
         */
        private PlayerResult decodeFramesAndPlay(
                int numOfTotalFrames, long timeOutMs, long msPerFrameCap) {
            int numOfDecodedFrames = 0;
            long firstOutputTimeMs = 0;
            long lastFrameAt = 0;
            final long loopStart = SystemClock.elapsedRealtime();

            while (numOfDecodedFrames < numOfTotalFrames
                    && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
                try {
                    queueDecoderInputBuffer();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in queueDecoderInputBuffer", exception);
                    break;
                }
                try {
                    final int outputResult = dequeueDecoderOutputBuffer();
                    if (outputResult == SimplePlayer.END_OF_STREAM) {
                        break;
                    }
                    if (outputResult == SimplePlayer.DEQUEUE_SUCCESS) {
                        if (firstOutputTimeMs == 0) {
                            firstOutputTimeMs = SystemClock.elapsedRealtime();
                        }
                        if (msPerFrameCap > 0) {
                            // Slow down if the cap is set and not yet reached.
                            final long delayMs =
                                    msPerFrameCap - (SystemClock.elapsedRealtime() - lastFrameAt);
                            if (lastFrameAt != 0 && delayMs > 0) {
                                final long threadDelayMs = 3; // In case of delay in the thread.
                                if (delayMs > threadDelayMs) {
                                    try {
                                        Thread.sleep(delayMs - threadDelayMs);
                                    } catch (InterruptedException ex) { /* */ }
                                }
                                while (SystemClock.elapsedRealtime() - lastFrameAt
                                        < msPerFrameCap) { /* */ }
                            }
                            lastFrameAt = SystemClock.elapsedRealtime();
                        }
                        numOfDecodedFrames++;
                    }
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in dequeueDecoderOutputBuffer", exception);
                }
            }
            // N.B.: totalTime measures from the first OUTPUT instead of the first INPUT, so it
            // does not include first-frame latency and therefore does not tell us whether the
            // timeout expired.
            final long totalTime = SystemClock.elapsedRealtime() - firstOutputTimeMs;
            return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
        }
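        // Frame-pacing note (added for clarity): when a cap is set, the loop above sleeps for
        // (delayMs - threadDelayMs) ms and then busy-waits the last ~3 ms, giving a more precise
        // inter-frame interval than Thread.sleep() alone at the cost of a little CPU.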
        /**
         * Queues an input buffer with data from the media file, one buffer at a time.
         *
         * @return true if an input buffer was dequeued from the decoder, false otherwise.
         */
        private boolean queueDecoderInputBuffer() {
            ByteBuffer inputBuffer;
            final ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
            final int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
            if (inputBufferIndex >= 0) {
                if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                    inputBuffer = inputBufferArray[inputBufferIndex];
                } else {
                    inputBuffer = decoder.getInputBuffer(inputBufferIndex);
                }
                final int sampleSize = extractor.readSampleData(inputBuffer, 0);
                if (sampleSize > 0) {
                    decoder.queueInputBuffer(
                            inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
                return true;
            }
            return false;
        }

        /**
         * Dequeues an output buffer.
         * For a video decoder, renders to the surface if one was provided.
         * For an audio decoder, copies the bytes out of the output buffer.
         *
         * @return an integer indicating its status (fail, success, or end of stream).
         */
        private int dequeueDecoderOutputBuffer() {
            final BufferInfo info = new BufferInfo();
            final int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                return END_OF_STREAM;
            }
            if (decoderStatus >= 0) {
                // When rendering to a surface, devices older than JELLY_BEAN_MR2 seem to always
                // report an info.size of 0 even if the decoder successfully decoded the frame,
                // so a zero size is only treated as a failure on JELLY_BEAN_MR2 and above.
                if (info.size <= 0 && ApiLevelUtil.isAtLeast(Build.VERSION_CODES.JELLY_BEAN_MR2)) {
                    return DEQUEUE_FAIL;
                }
                if (!renderToSurface) {
                    ByteBuffer outputBuffer;
                    if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                        outputBuffer = decoder.getOutputBuffers()[decoderStatus];
                    } else {
                        outputBuffer = decoder.getOutputBuffer(decoderStatus);
                    }
                    outputBytes = new byte[info.size];
                    outputBuffer.get(outputBytes);
                    outputBuffer.clear();
                }
                decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
                return DEQUEUE_SUCCESS;
            }
            return DEQUEUE_FAIL;
        }

        public void release() {
            decoderRelease();
            extractorRelease();
        }

        private boolean setExtractorDataSource(VideoFormat videoFormat) {
            checkNotNull(videoFormat);
            try {
                final AssetFileDescriptor afd = videoFormat.getAssetFileDescriptor(context);
                extractor.setDataSource(
                        afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
                afd.close();
            } catch (IOException exception) {
                Log.e(TAG, "IOException in setDataSource", exception);
                return false;
            }
            return true;
        }
        /**
         * Creates a decoder based on conditions.
         *
         * <p>If a codec name is provided, {@link MediaCodec#createByCodecName(String)} is used.
         * Otherwise, on LOLLIPOP and above,
         * {@link MediaCodecList#findDecoderForFormat(MediaFormat)} is used to find the name of a
         * codec that supports the media format, and on OS versions older than LOLLIPOP,
         * {@link MediaCodec#createDecoderByType(String)} is used.
         */
        private boolean createDecoder(MediaFormat mediaFormat) {
            try {
                if (codecName != null) {
                    decoder = MediaCodec.createByCodecName(codecName);
                } else if (ApiLevelUtil.isAtLeast(Build.VERSION_CODES.LOLLIPOP)) {
                    if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
                        // On LOLLIPOP, the format must not contain a frame rate.
                        mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
                    }
                    if (mediaCodecList == null) {
                        mediaCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
                    }
                    decoder = MediaCodec.createByCodecName(
                            mediaCodecList.findDecoderForFormat(mediaFormat));
                } else {
                    decoder = MediaCodec.createDecoderByType(
                            mediaFormat.getString(MediaFormat.KEY_MIME));
                }
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder creation", exception);
                decoderRelease();
                return false;
            }
            return true;
        }

        private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
            try {
                decoder.configure(mediaFormat, surface, null, 0);
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder configuration", exception);
                try {
                    decoder.reset();
                } catch (Exception resetException) {
                    Log.e(TAG, "Exception during decoder reset", resetException);
                }
                decoderRelease();
                return false;
            }
            return true;
        }

        private void setRenderToSurface(boolean render) {
            this.renderToSurface = render;
        }

        private boolean startDecoder() {
            try {
                decoder.start();
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder start", exception);
                decoder.reset();
                decoderRelease();
                return false;
            }
            return true;
        }

        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                decoder.reset();
                // An IllegalStateException happens when the decoder fails to start.
                Log.e(TAG, "IllegalStateException during decoder stop", exception);
            } finally {
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException during decoder release", exception);
                }
                decoder = null;
            }
        }

        private void extractorRelease() {
            if (extractor == null) {
                return;
            }
            try {
                extractor.release();
            } catch (IllegalStateException exception) {
                Log.e(TAG, "IllegalStateException during extractor release", exception);
            }
        }
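        // Note on KEY_MAX_WIDTH / KEY_MAX_HEIGHT (added for clarity): these keys, available
        // since KITKAT, tell the codec the largest resolution it may need to handle during
        // adaptive (ABR) playback so that it can size its buffers up front;
        // configureVideoFormat() below sets them only when the VideoFormat requests ABR.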
        private static void configureVideoFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());
            if (ApiLevelUtil.isBefore(Build.VERSION_CODES.KITKAT)) {
                return;
            }
            // Set KEY_MAX_WIDTH and KEY_MAX_HEIGHT when isAbrEnabled() is set.
            if (videoFormat.isAbrEnabled()) {
                try {
                    // Check for the max resolution supported by the codec.
                    final MediaCodec decoder = MediaUtils.getDecoder(mediaFormat);
                    final VideoCapabilities videoCapabilities = MediaUtils.getVideoCapabilities(
                            decoder.getName(), videoFormat.getMimeType());
                    decoder.release();
                    final int maxWidth = videoCapabilities.getSupportedWidths().getUpper();
                    final int maxHeight =
                            videoCapabilities.getSupportedHeightsFor(maxWidth).getUpper();
                    if (maxWidth >= videoFormat.getWidth()
                            && maxHeight >= videoFormat.getHeight()) {
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, maxWidth);
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, maxHeight);
                        return;
                    }
                } catch (NullPointerException exception) { /* */ }
                // Fall back to the current size if the codec's max supported width/height could
                // not be retrieved, or if the max is smaller than the current size.
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getHeight());
            }
        }

        /**
         * Returns the index of the first track matching the given media type.
         */
        private int getFirstTrackIndexByType(String format) {
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
                if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith(format + "/")) {
                    return i;
                }
            }
            Log.e(TAG, "couldn't get a " + format + " track");
            return NO_TRACK_INDEX;
        }

        /**
         * Stores the result from SimplePlayer.
         */
        public static final class PlayerResult {

            public static final int UNSET = -1;
            private final boolean configureSuccess;
            private final boolean startSuccess;
            private final boolean decodeSuccess;
            private final long totalTime;

            public PlayerResult(
                    boolean configureSuccess, boolean startSuccess,
                    boolean decodeSuccess, long totalTime) {
                this.configureSuccess = configureSuccess;
                this.startSuccess = startSuccess;
                this.decodeSuccess = decodeSuccess;
                this.totalTime = totalTime;
            }

            public PlayerResult(PlayerResult playerResult) {
                this(playerResult.configureSuccess, playerResult.startSuccess,
                        playerResult.decodeSuccess, playerResult.totalTime);
            }

            public PlayerResult() {
                // Dummy PlayerResult.
                this(false, false, false, UNSET);
            }

            public static PlayerResult failToStart() {
                return new PlayerResult(true, false, false, UNSET);
            }

            public String getFailureMessage() {
                if (!configureSuccess) {
                    return "Failed to configure decoder.";
                } else if (!startSuccess) {
                    return "Failed to start decoder.";
                } else if (!decodeSuccess) {
                    return "Failed to decode the expected number of frames.";
                } else {
                    return "Failed to finish decoding.";
                }
            }

            public boolean isConfigureSuccess() {
                return configureSuccess;
            }

            public boolean isSuccess() {
                return configureSuccess && startSuccess && decodeSuccess
                        && getTotalTime() != UNSET;
            }

            public long getTotalTime() {
                return totalTime;
            }

        }

    }
    /* Utility class for collecting common test case functionality. */
    class TestHelper {

        private final String TAG = TestHelper.class.getSimpleName();

        private final Context context;
        private final Handler handler;
        private final Activity activity;

        public TestHelper(Context context, Activity activity) {
            this.context = checkNotNull(context);
            this.handler = new Handler(Looper.getMainLooper());
            this.activity = activity;
        }

        public Bitmap generateBitmapFromImageResourceId(int resourceId) {
            return BitmapFactory.decodeStream(
                    context.getResources().openRawResource(resourceId));
        }

        public Context getContext() {
            return context;
        }

        public void rotateOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    final int orientation =
                            context.getResources().getConfiguration().orientation;
                    if (orientation == Configuration.ORIENTATION_PORTRAIT) {
                        activity.setRequestedOrientation(
                                ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        activity.setRequestedOrientation(
                                ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                }
            });
        }

        public void unsetOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    activity.setRequestedOrientation(
                            ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
                }
            });
        }

        public void generateView(View view) {
            RelativeLayout relativeLayout =
                    (RelativeLayout) activity.findViewById(R.id.attach_view);
            ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
            handler.post(viewGenerator);
        }

        public void cleanUpView(View view) {
            ViewCleaner viewCleaner = new ViewCleaner(view);
            handler.post(viewCleaner);
        }

        public Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
            handler.post(snapshot);
            synchronized (snapshot.getSyncObject()) {
                try {
                    snapshot.getSyncObject().wait(VideoViewSnapshot.SNAPSHOT_TIMEOUT_MS + 100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    Log.e(TAG, "Unable to finish generateBitmapFromVideoViewSnapshot().");
                    return null;
                }
            }
            if (!snapshot.isBitmapReady()) {
                Log.e(TAG, "Time out in generateBitmapFromVideoViewSnapshot().");
                return null;
            }
            return snapshot.getBitmap();
        }
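        // Synchronization note for generateBitmapFromVideoViewSnapshot() (added for clarity):
        // the snapshot runnable is posted to the main thread, and the calling thread then blocks
        // on the snapshot's sync object until the runnable calls notify() or the timeout
        // elapses; isBitmapReady() afterwards distinguishes a real result from a timeout.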
        private class ViewGenerator implements Runnable {

            private final View view;
            private final RelativeLayout relativeLayout;

            public ViewGenerator(RelativeLayout relativeLayout, View view) {
                this.view = checkNotNull(view);
                this.relativeLayout = checkNotNull(relativeLayout);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                        VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
                view.setLayoutParams(params);
                relativeLayout.addView(view);
            }

        }

        private class ViewCleaner implements Runnable {

            private final View view;

            public ViewCleaner(View view) {
                this.view = checkNotNull(view);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
            }

        }

    }

}

/* Factory for manipulating a {@link View}. */
abstract class VideoViewFactory {

    public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
    public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
    public static final int VIEW_WIDTH = 480;
    public static final int VIEW_HEIGHT = 360;

    public VideoViewFactory() {}

    public abstract void release();

    public abstract String getName();

    public abstract View createView(Context context);

    public void waitForViewIsAvailable() throws Exception {
        waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
    }

    public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;

    public abstract Surface getSurface();

    public abstract VideoViewSnapshot getVideoViewSnapshot();

    public boolean hasLooper() {
        return Looper.myLooper() != null;
    }

}

/* Factory for building a {@link TextureView}. */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    @Override
    public TextureView createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!textureView.isAvailable()) {
            throw new InterruptedException("Taking too long to attach a TextureView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }
    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}

/**
 * Factory for building a {@link SurfaceView}.
 */
@TargetApi(24)
class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {

    private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "SurfaceView";
    private final Object syncToken = new Object();

    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    public SurfaceViewFactory() {}

    @Override
    public void release() {
        surfaceView = null;
        surfaceHolder = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        if (!super.hasLooper()) {
            Looper.prepare();
        }
        surfaceView = new SurfaceView(context);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        return surfaceView;
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!getSurface().isValid()) {
            throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public Surface getSurface() {
        return surfaceHolder == null ? null : surfaceHolder.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}

}
/**
 * Factory for building an EGL and GLES pipeline that renders offscreen, in place of a
 * {@link GLSurfaceView}. Uses {@link EGL10} and {@link GLES20}.
 */
@TargetApi(16)
class GLSurfaceViewFactory extends VideoViewFactory {

    private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "GLSurfaceView";

    private final Object surfaceSyncToken = new Object();

    private GLSurfaceViewThread glSurfaceViewThread;
    private boolean byteBufferIsReady = false;

    public GLSurfaceViewFactory() {}

    @Override
    public void release() {
        glSurfaceViewThread.release();
        glSurfaceViewThread = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        // Do all GL rendering in the GL thread.
        glSurfaceViewThread = new GLSurfaceViewThread();
        glSurfaceViewThread.start();
        // There is no view to display; return null.
        return null;
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs
                && glSurfaceViewThread.getSurface() == null) {
            synchronized (surfaceSyncToken) {
                try {
                    surfaceSyncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when waiting for the surface from"
                            + " GLSurfaceView to become available.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (glSurfaceViewThread.getSurface() == null) {
            throw new InterruptedException("Taking too long for the surface from"
                    + " GLSurfaceView to become available.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public Surface getSurface() {
        return glSurfaceViewThread.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
    }

    public boolean byteBufferIsReady() {
        return byteBufferIsReady;
    }

    public ByteBuffer getByteBuffer() {
        return glSurfaceViewThread.getByteBuffer();
    }

    /* Does all GL operations. */
    private class GLSurfaceViewThread extends Thread
            implements SurfaceTexture.OnFrameAvailableListener {

        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        private FloatBuffer triangleVertices;
        private float[] textureTransform = new float[16];

        // Make the top-left corner correspond to texture coordinate (0, 0). This complies with
        // the transformation matrix obtained from SurfaceTexture.getTransformMatrix.
        private float[] triangleVerticesData = {
            // X, Y, Z, U, V
            -1f, -1f, 0f, 0f, 1f,
             1f, -1f, 0f, 1f, 1f,
            -1f,  1f, 0f, 0f, 0f,
             1f,  1f, 0f, 1f, 0f,
        };
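        // Geometry note (added for clarity): the four vertices above form a triangle strip that
        // covers the full [-1, 1] clip-space quad; the V coordinates are flipped (V = 1 at the
        // bottom) so that, combined with SurfaceTexture's transform matrix, the top-left of the
        // video frame lands at texture coordinate (0, 0).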
        private static final String VERTEX_SHADER =
                "attribute vec4 aPosition;\n"
                + "attribute vec4 aTextureCoord;\n"
                + "uniform mat4 uTextureTransform;\n"
                + "varying vec2 vTextureCoord;\n"
                + "void main() {\n"
                + " gl_Position = aPosition;\n"
                + " vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
                + "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n"
                + "precision mediump float;\n" // highp here doesn't seem to matter
                + "varying vec2 vTextureCoord;\n"
                + "uniform samplerExternalOES sTexture;\n"
                + "void main() {\n"
                + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                + "}\n";

        private int glProgram;
        private int textureID = -1;
        private int aPositionHandle;
        private int aTextureHandle;
        private int uTextureTransformHandle;
        private EGLDisplay eglDisplay = null;
        private EGLContext eglContext = null;
        private EGLSurface eglSurface = null;
        private EGL10 egl10;
        private Surface surface = null;
        private SurfaceTexture surfaceTexture;
        private ByteBuffer byteBuffer;
        private Looper looper;

        public GLSurfaceViewThread() {}

        @Override
        public void run() {
            Looper.prepare();
            looper = Looper.myLooper();
            triangleVertices = ByteBuffer
                    .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            triangleVertices.put(triangleVerticesData).position(0);

            eglSetup();
            makeCurrent();
            eglSurfaceCreated();

            surfaceTexture = new SurfaceTexture(getTextureId());
            surfaceTexture.setOnFrameAvailableListener(this);
            surface = new Surface(surfaceTexture);
            synchronized (surfaceSyncToken) {
                surfaceSyncToken.notify();
            }
            // Store pixels from the surface.
            byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
            byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
            Looper.loop();
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            checkGlError("before updateTexImage");
            surfaceTexture.updateTexImage();
            st.getTransformMatrix(textureTransform);
            drawFrame();
            saveFrame();
        }
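        // EGL bring-up overview (added for clarity): eglSetup() below follows the standard
        // sequence (get display, initialize, choose an RGBA8888 pbuffer-capable config, create a
        // GLES 2.0 context, create a VIEW_WIDTH x VIEW_HEIGHT pbuffer surface), and makeCurrent()
        // then binds the context and surface to this thread before any GL calls are made.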
        /* Prepares EGL with a GLES 2.0 context and a surface that supports pbuffer. */
        public void eglSetup() {
            egl10 = (EGL10) EGLContext.getEGL();
            eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
            if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get egl10 display");
            }
            int[] version = new int[2];
            if (!egl10.eglInitialize(eglDisplay, version)) {
                eglDisplay = null;
                throw new RuntimeException("unable to initialize egl10");
            }
            // Configure EGL for pbuffer and OpenGL ES 2.0 with 8-bit RGBA channels.
            int[] configAttribs = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            if (!egl10.eglChooseConfig(
                    eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
                throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
            }
            // Configure the EGL context for OpenGL ES 2.0.
            int[] contextAttribs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
            };
            eglContext = egl10.eglCreateContext(
                    eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
            checkEglError("eglCreateContext");
            if (eglContext == null) {
                throw new RuntimeException("null context");
            }
            // Create a pbuffer surface.
            int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, VIEW_WIDTH,
                EGL10.EGL_HEIGHT, VIEW_HEIGHT,
                EGL10.EGL_NONE
            };
            eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
            checkEglError("eglCreatePbufferSurface");
            if (eglSurface == null) {
                throw new RuntimeException("surface was null");
            }
        }

        public void release() {
            looper.quit();
            if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
                egl10.eglMakeCurrent(eglDisplay,
                        EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
                egl10.eglDestroySurface(eglDisplay, eglSurface);
                egl10.eglDestroyContext(eglDisplay, eglContext);
                egl10.eglTerminate(eglDisplay);
            }
            eglDisplay = EGL10.EGL_NO_DISPLAY;
            eglContext = EGL10.EGL_NO_CONTEXT;
            eglSurface = EGL10.EGL_NO_SURFACE;
            surface.release();
            surfaceTexture.release();
            byteBufferIsReady = false;
            byteBuffer = null;
        }

        /* Makes our EGL context and surface current. */
        public void makeCurrent() {
            if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
            checkEglError("eglMakeCurrent");
        }
        /* Call this after the EGL surface is created and made current. */
        public void eglSurfaceCreated() {
            glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            if (glProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
            aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
            checkLocation(aPositionHandle, "aPosition");
            aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
            checkLocation(aTextureHandle, "aTextureCoord");
            uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
            checkLocation(uTextureTransformHandle, "uTextureTransform");

            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            checkGlError("glGenTextures");
            textureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        public void drawFrame() {
            GLES20.glUseProgram(glProgram);
            checkGlError("glUseProgram");
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            checkGlError("glActiveTexture");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aPositionHandle");
            GLES20.glEnableVertexAttribArray(aPositionHandle);
            checkGlError("glEnableVertexAttribArray aPositionHandle");

            triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aTextureHandle");
            GLES20.glEnableVertexAttribArray(aTextureHandle);
            checkGlError("glEnableVertexAttribArray aTextureHandle");

            GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false, textureTransform, 0);
            checkGlError("glUniformMatrix uTextureTransformHandle");

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }
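        // Attribute layout arithmetic (added for clarity): each vertex is 5 floats (X, Y, Z, U,
        // V), so the stride is 5 * 4 = 20 bytes; positions are read as 3 floats starting at
        // float offset 0 and UVs as 2 floats starting at float offset 3, matching the
        // TRIANGLE_VERTICES_DATA_* constants used in drawFrame() above.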
        /* Reads the pixels into a ByteBuffer. */
        public void saveFrame() {
            byteBufferIsReady = false;
            byteBuffer.clear();
            GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
                    GLES20.GL_UNSIGNED_BYTE, byteBuffer);
            byteBufferIsReady = true;
        }

        public int getTextureId() {
            return textureID;
        }

        public Surface getSurface() {
            return surface;
        }

        public ByteBuffer getByteBuffer() {
            return byteBuffer;
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);

            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }
            int program = GLES20.glCreateProgram();
            if (program == 0) {
                Log.e(TAG, "Could not create program");
                // Bail out instead of attaching shaders to an invalid program object.
                return 0;
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);

            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        private void checkEglError(String msg) {
            int error;
            if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }

        public void checkGlError(String op) {
            int error;
            if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

        public void checkLocation(int location, String label) {
            if (location < 0) {
                throw new RuntimeException("Unable to locate '" + label + "' in program");
            }
        }
    }

}
/* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    public static final long SNAPSHOT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(30);
    public static final long SLEEP_TIME_MS = 30;
    public static final Object SYNC_TOKEN = new Object();

    public abstract Bitmap getBitmap();

    public abstract boolean isBitmapReady();

    public abstract Object getSyncObject();

}

/* Runnable to get a bitmap from a texture view on the UI thread via a handler.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class TextureViewSnapshot extends VideoViewSnapshot {

    private final TextureView tv;
    private Bitmap bitmap = null;

    public TextureViewSnapshot(TextureView tv) {
        this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
    }

    @Override
    public void run() {
        bitmap = tv.getBitmap();
        synchronized (SYNC_TOKEN) {
            SYNC_TOKEN.notify();
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

}

/**
 * Runnable to get a bitmap of a {@link SurfaceView}.
 * Note that {@link PixelCopy} itself does not have to be called from a runnable.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class SurfaceViewSnapshot extends VideoViewSnapshot {

    private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
    private static final int PIXELCOPY_TIMEOUT_MS = 1000;
    private static final int INITIAL_STATE = -1;

    private final SurfaceView surfaceView;
    private final int width;
    private final int height;

    private Bitmap bitmap;
    private int copyResult;

    public SurfaceViewSnapshot(SurfaceView surfaceView, int width, int height) {
        this.surfaceView = surfaceView;
        this.width = width;
        this.height = height;
        this.copyResult = INITIAL_STATE;
        this.bitmap = null;
    }
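    // PixelCopy note (added for clarity): PixelCopy.request() requires API 24+ and a valid
    // source surface, so run() below polls, retrying the copy every SLEEP_TIME_MS until it
    // succeeds or SNAPSHOT_TIMEOUT_MS elapses.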
    @Override
    public void run() {
        final long start = SystemClock.elapsedRealtime();
        copyResult = INITIAL_STATE;
        final SynchronousPixelCopy copyHelper = new SynchronousPixelCopy();
        bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
        try {
            // Wait for PixelCopy to finish.
            while ((copyResult = copyHelper.request(surfaceView, bitmap)) != PixelCopy.SUCCESS
                    && (SystemClock.elapsedRealtime() - start) < SNAPSHOT_TIMEOUT_MS) {
                Thread.sleep(SLEEP_TIME_MS);
            }
        } catch (InterruptedException e) {
            Log.e(TAG, "Pixel copy was stopped/interrupted before it finished.", e);
            bitmap = null;
        } finally {
            copyHelper.release();
            synchronized (SYNC_TOKEN) {
                SYNC_TOKEN.notify();
            }
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null && copyResult == PixelCopy.SUCCESS;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {

        private final Handler handler;
        private final HandlerThread thread;

        private int status = INITIAL_STATE;

        public SynchronousPixelCopy() {
            this.thread = new HandlerThread("PixelCopyHelper");
            thread.start();
            this.handler = new Handler(thread.getLooper());
        }

        public void release() {
            if (thread.isAlive()) {
                thread.quit();
            }
        }

        public int request(SurfaceView source, Bitmap dest) {
            synchronized (this) {
                try {
                    PixelCopy.request(source, dest, this, handler);
                    return getResultLocked();
                } catch (Exception e) {
                    Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
                    return -1;
                }
            }
        }

        private int getResultLocked() {
            try {
                this.wait(PIXELCOPY_TIMEOUT_MS);
            } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ }
            return status;
        }

        @Override
        public void onPixelCopyFinished(int copyResult) {
            synchronized (this) {
                status = copyResult;
                this.notify();
            }
        }

    }

}
/**
 * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
 * Note that, because of how the bitmap is captured in GLSurfaceViewFactory,
 * this class does not strictly have to be a runnable.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class GLSurfaceViewSnapshot extends VideoViewSnapshot {

    private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();

    private final GLSurfaceViewFactory glSurfaceViewFactory;
    private final int width;
    private final int height;

    private Bitmap bitmap = null;
    private boolean bitmapIsReady = false;

    public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
        this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
        this.width = width;
        this.height = height;
    }

    @Override
    public void run() {
        bitmapIsReady = false;
        bitmap = null;
        try {
            waitForByteBuffer();
        } catch (InterruptedException exception) {
            Log.e(TAG, exception.getMessage());
            bitmap = null;
            notifyObject();
            return;
        }
        try {
            final ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
            bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            byteBuffer.rewind();
            bitmap.copyPixelsFromBuffer(byteBuffer);
            bitmapIsReady = true;
            byteBuffer.clear();
        } catch (NullPointerException exception) {
            Log.e(TAG, "glSurfaceViewFactory or byteBuffer may have been released", exception);
            bitmap = null;
        } finally {
            notifyObject();
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmapIsReady;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    private void notifyObject() {
        synchronized (SYNC_TOKEN) {
            SYNC_TOKEN.notify();
        }
    }

    private void waitForByteBuffer() throws InterruptedException {
        // Wait for the byte buffer to be ready.
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < SNAPSHOT_TIMEOUT_MS) {
            if (glSurfaceViewFactory.byteBufferIsReady()) {
                return;
            }
            Thread.sleep(SLEEP_TIME_MS);
        }
        throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
    }

}
/* Stores information about a video file. */
class VideoFormat {

    public static final String STRING_UNSET = "UNSET";
    public static final int INT_UNSET = -1;

    private final String filename;

    private String mimeType = STRING_UNSET;
    private int width = INT_UNSET;
    private int height = INT_UNSET;
    private int maxWidth = INT_UNSET;
    private int maxHeight = INT_UNSET;
    private FilenameParser filenameParser;

    public VideoFormat(String filename) {
        this.filename = filename;
    }

    public VideoFormat(VideoFormat videoFormat) {
        this(videoFormat.filename);
    }

    private FilenameParser getParsedName() {
        if (filenameParser == null) {
            filenameParser = new FilenameParser(filename);
        }
        return filenameParser;
    }

    public String getMediaFormat() {
        return "video";
    }

    public void setMimeType(String mimeType) {
        this.mimeType = mimeType;
    }

    public String getMimeType() {
        if (mimeType.equals(STRING_UNSET)) {
            return getParsedName().getMimeType();
        }
        return mimeType;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public void setMaxWidth(int maxWidth) {
        this.maxWidth = maxWidth;
    }

    public int getWidth() {
        if (width == INT_UNSET) {
            return getParsedName().getWidth();
        }
        return width;
    }

    public int getMaxWidth() {
        return maxWidth;
    }

    public int getOriginalWidth() {
        return getParsedName().getWidth();
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public void setMaxHeight(int maxHeight) {
        this.maxHeight = maxHeight;
    }

    public int getHeight() {
        if (height == INT_UNSET) {
            return getParsedName().getHeight();
        }
        return height;
    }

    public int getMaxHeight() {
        return maxHeight;
    }

    public int getOriginalHeight() {
        return getParsedName().getHeight();
    }

    public boolean isAbrEnabled() {
        return false;
    }

    public String getOriginalSize() {
        if (width == INT_UNSET || height == INT_UNSET) {
            return getParsedName().getSize();
        }
        return width + "x" + height;
    }

    public String getDescription() {
        return getParsedName().getDescription();
    }

    public String toPrettyString() {
        return getParsedName().toPrettyString();
    }

    public AssetFileDescriptor getAssetFileDescriptor(Context context) {
        try {
            return context.getAssets().openFd(filename);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

}

/* File parser for filenames with the format {description}-{mimeType}_{size}_{framerate}.{format}. */
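/*
 * Worked example (added for clarity; the filename below is hypothetical): for
 * "portrait-h264_480x360_30fps.mp4", parseFilename() yields description "portrait",
 * codec "h264", width 480, and height 360; getMimeType() then maps "h264" to
 * MimeTypes.VIDEO_H264.
 */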
/* File parser for filenames with the format {description}-{codec}_{size}_{framerate}.{format}. */
class FilenameParser {

    static final String VP9 = "vp9";
    static final String H264 = "h264";

    private final String filename;

    private String codec = VideoFormat.STRING_UNSET;
    private String description = VideoFormat.STRING_UNSET;
    private int width = VideoFormat.INT_UNSET;
    private int height = VideoFormat.INT_UNSET;

    FilenameParser(String filename) {
        this.filename = filename;
        parseFilename(filename);
    }

    public String getCodec() {
        return codec;
    }

    public String getMimeType() {
        switch (codec) {
            case H264:
                return MimeTypes.VIDEO_H264;
            case VP9:
                return MimeTypes.VIDEO_VP9;
            default:
                return null;
        }
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    public String getSize() {
        return width + "x" + height;
    }

    public String getDescription() {
        return description;
    }

    String toPrettyString() {
        // The codec field is initialized to STRING_UNSET, never null, so test
        // against the sentinel rather than null.
        if (!codec.equals(VideoFormat.STRING_UNSET)) {
            return codec.toUpperCase() + " " + getSize();
        }
        return filename;
    }

    private void parseFilename(String filename) {
        final String descriptionDelimiter = "-";
        final String infoDelimiter = "_";
        final String sizeDelimiter = "x";
        try {
            this.description = filename.split(descriptionDelimiter)[0];
            final String[] fileInfo = filename.split(descriptionDelimiter)[1].split(infoDelimiter);
            this.codec = fileInfo[0];
            this.width = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[0]);
            this.height = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[1]);
        } catch (Exception exception) { /* Filename format does not match. */ }
    }

}
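/*
 * Illustrative sketch only, using a hypothetical filename that matches the
 * pattern above:
 *
 *   FilenameParser parser = new FilenameParser("streaming-vp9_1280x720_30fps.webm");
 *   parser.getCodec();        // "vp9"
 *   parser.getMimeType();     // MimeTypes.VIDEO_VP9, i.e. "video/x-vnd.on2.vp9"
 *   parser.getSize();         // "1280x720"
 *   parser.getDescription();  // "streaming"
 *   parser.toPrettyString();  // "VP9 1280x720"
 */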
/**
 * Compares bitmaps to determine whether they are similar.
 *
 * <p>To determine the greatest pixel difference, each pixel is transformed into the
 * CIE L*a*b* color space and compared using the Euclidean distance formula.
 */
class BitmapCompare {

    private static final int RED = 0;
    private static final int GREEN = 1;
    private static final int BLUE = 2;
    private static final int X = 0;
    private static final int Y = 1;
    private static final int Z = 2;

    private BitmapCompare() {}

    /**
     * Produces the greatest pixel difference between two bitmaps. Used to determine
     * bitmap similarity.
     *
     * @param bitmap1 A bitmap to compare to bitmap2.
     * @param bitmap2 A bitmap to compare to bitmap1.
     * @return A {@link Difference} with an integer describing the greatest pixel difference,
     *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
     *     {@link Pair} of the (col, row) pixel coordinate where it was first found.
     */
    @TargetApi(12)
    public static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
        if (bitmap1 == null || bitmap2 == null) {
            return new Difference(Integer.MAX_VALUE);
        }
        if (bitmap1.equals(bitmap2) || bitmap1.sameAs(bitmap2)) {
            return new Difference(0);
        }
        if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) {
            return new Difference(Integer.MAX_VALUE);
        }
        // Convert all pixels to the CIE L*a*b* color space so we can do a direct color
        // comparison using the Euclidean distance formula.
        final double[][] pixels1 = convertRgbToCieLab(bitmap1);
        final double[][] pixels2 = convertRgbToCieLab(bitmap2);
        int greatestDifference = 0;
        int greatestDifferenceIndex = -1;
        for (int i = 0; i < pixels1.length; i++) {
            final int difference = euclideanDistance(pixels1[i], pixels2[i]);
            if (difference > greatestDifference) {
                greatestDifference = difference;
                greatestDifferenceIndex = i;
            }
        }
        // The pixel array is row-major, so column = index % width and row = index / width.
        return new Difference(greatestDifference, Pair.create(
                greatestDifferenceIndex % bitmap1.getWidth(),
                greatestDifferenceIndex / bitmap1.getWidth()));
    }

    @SuppressLint("UseSparseArrays")
    private static double[][] convertRgbToCieLab(Bitmap bitmap) {
        // Cache the transform per distinct ARGB value; video frames repeat colors heavily.
        final HashMap<Integer, double[]> pixelTransformCache = new HashMap<>();
        final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
        final int[] pixels = new int[bitmap.getHeight() * bitmap.getWidth()];
        bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
        for (int i = 0; i < pixels.length; i++) {
            final double[] transformedColor = pixelTransformCache.get(pixels[i]);
            if (transformedColor != null) {
                result[i] = transformedColor;
            } else {
                result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
                pixelTransformCache.put(pixels[i], result[i]);
            }
        }
        return result;
    }
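    /*
     * Sanity check for the two conversions below (worked numbers for illustration,
     * not from the original source): opaque white, Color.rgb(255, 255, 255), maps to
     * approximately XYZ (95.05, 100.0, 108.9), the D65 reference white, which in turn
     * maps to approximately CIE-L*a*b* (100, 0, 0).
     */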
    /**
     * Conversion from RGB to XYZ, based on the algorithm defined at:
     * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
     *
     * <p><pre>{@code
     * var_R = ( R / 255 ) //R from 0 to 255
     * var_G = ( G / 255 ) //G from 0 to 255
     * var_B = ( B / 255 ) //B from 0 to 255
     *
     * if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
     * else                   var_R = var_R / 12.92
     * if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
     * else                   var_G = var_G / 12.92
     * if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
     * else                   var_B = var_B / 12.92
     *
     * var_R = var_R * 100
     * var_G = var_G * 100
     * var_B = var_B * 100
     *
     * // Observer = 2°, Illuminant = D65
     * X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
     * Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
     * Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
     * }</pre>
     *
     * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
     * @return An array of doubles where each value is a component of the XYZ color space.
     */
    private static double[] convertRgbToXyz(int rgbColor) {
        final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
        for (int i = 0; i < comp.length; i++) {
            comp[i] /= 255.0;
            if (comp[i] > 0.04045) {
                comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
            } else {
                comp[i] /= 12.92;
            }
            comp[i] *= 100;
        }
        final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
        final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
        final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
        return new double[] {x, y, z};
    }

    /**
     * Conversion from XYZ to CIE-L*a*b*, based on the algorithm defined at:
     * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
     *
     * <p><pre>{@code
     * var_X = X / ref_X   //ref_X =  95.047  Observer = 2°, Illuminant = D65
     * var_Y = Y / ref_Y   //ref_Y = 100.000
     * var_Z = Z / ref_Z   //ref_Z = 108.883
     *
     * if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
     * else                    var_X = ( 7.787 * var_X ) + ( 16 / 116 )
     * if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
     * else                    var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
     * if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
     * else                    var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
     *
     * CIE-L* = ( 116 * var_Y ) - 16
     * CIE-a* = 500 * ( var_X - var_Y )
     * CIE-b* = 200 * ( var_Y - var_Z )
     * }</pre>
     *
     * @param comp An array of doubles where each value is a component of the XYZ color space.
     * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
     */
    private static double[] convertXyzToCieLab(double[] comp) {
        comp[X] /= 95.047;
        comp[Y] /= 100.0;
        comp[Z] /= 108.883;
        for (int i = 0; i < comp.length; i++) {
            if (comp[i] > 0.008856) {
                comp[i] = Math.pow(comp[i], (1.0 / 3.0));
            } else {
                comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
            }
        }
        final double l = (116 * comp[Y]) - 16;
        final double a = 500 * (comp[X] - comp[Y]);
        final double b = 200 * (comp[Y] - comp[Z]);
        return new double[] {l, a, b};
    }
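    /*
     * Scale of the metric used below (illustrative note, not from the original
     * source): L* runs from 0 (black) to 100 (white) and opaque black maps to
     * roughly CIE-L*a*b* (0, 0, 0), so the distance between black and white is 100,
     * the full span of the lightness axis. A distance of roughly 2.3 is commonly
     * cited as a just-noticeable color difference in this space.
     */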
    private static int euclideanDistance(double[] p1, double[] p2) {
        if (p1.length != p2.length) {
            return Integer.MAX_VALUE;
        }
        double result = 0;
        for (int i = 0; i < p1.length; i++) {
            result += Math.pow(p1[i] - p2[i], 2);
        }
        return (int) Math.round(Math.sqrt(result));
    }

    /**
     * Crops the border of the array representing an image by hBorderSize
     * pixels on the left and right borders, and by vBorderSize pixels on the
     * top and bottom borders (so the width is 2 * hBorderSize smaller and
     * the height is 2 * vBorderSize smaller), then scales the image up to
     * match the original size using bilinear interpolation.
     */
    private static Bitmap shrinkAndScaleBilinear(
            Bitmap input, double hBorderSize, double vBorderSize) {

        int width = input.getWidth();
        int height = input.getHeight();

        // Compute the proper step sizes.
        double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
        double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);

        // Read the input bitmap into RGB arrays.
        int[] inputPixels = new int[width * height];
        input.getPixels(inputPixels, 0, width, 0, 0, width, height);
        int[][] inputRgb = new int[width * height][3];
        for (int i = 0; i < width * height; ++i) {
            inputRgb[i][0] = Color.red(inputPixels[i]);
            inputRgb[i][1] = Color.green(inputPixels[i]);
            inputRgb[i][2] = Color.blue(inputPixels[i]);
        }
        inputPixels = null;

        // Prepare the output buffer.
        int[] outputPixels = new int[width * height];

        // Start the iteration. The first y coordinate is vBorderSize.
        double y = vBorderSize;
        for (int yIndex = 0; yIndex < height; ++yIndex) {
            // The first x coordinate is hBorderSize.
            double x = hBorderSize;
            for (int xIndex = 0; xIndex < width; ++xIndex) {
                // Determine the square of interest.
                int left = (int) x;   // This is floor(x).
                int top = (int) y;    // This is floor(y).
                int right = left + 1;
                int bottom = top + 1;

                // (u, v) is the fractional part of (x, y).
                double u = x - (double) left;
                double v = y - (double) top;

                // Precompute necessary products to save time.
                double p00 = (1.0 - u) * (1.0 - v);
                double p01 = (1.0 - u) * v;
                double p10 = u * (1.0 - v);
                double p11 = u * v;

                // Clamp the indices to prevent out-of-bound access that may be
                // caused by round-off error.
                if (left >= width) left = width - 1;
                if (top >= height) top = height - 1;
                if (right >= width) right = width - 1;
                if (bottom >= height) bottom = height - 1;

                // Sample RGB values from the four corners.
                int[] rgb00 = inputRgb[top * width + left];
                int[] rgb01 = inputRgb[bottom * width + left];
                int[] rgb10 = inputRgb[top * width + right];
                int[] rgb11 = inputRgb[bottom * width + right];

                // Interpolate each component of RGB separately.
                int[] mixedColor = new int[3];
                for (int k = 0; k < 3; ++k) {
                    mixedColor[k] = (int) Math.round(
                            p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
                            + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
                }
                // Convert RGB to the bitmap Color format and store.
                outputPixels[yIndex * width + xIndex] = Color.rgb(
                        mixedColor[0], mixedColor[1], mixedColor[2]);
                x += xInc;
            }
            y += yInc;
        }
        // Assemble the output buffer into a Bitmap object.
        return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
    }
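    /*
     * Worked example for the interpolation above (numbers for illustration, not
     * from the original source): at (x, y) = (2.25, 3.5), the square of interest is
     * left = 2, top = 3 with u = 0.25, v = 0.5, giving weights p00 = 0.375,
     * p01 = 0.375, p10 = 0.125, p11 = 0.125, which sum to 1. With hBorderSize = 1
     * on a 640-wide image, xInc = (639 - 2) / 639, so the 640 samples span
     * [1.0, 638.0] instead of [0, 639].
     */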
    /**
     * Calls computeDifference on multiple cropped-and-scaled versions of bitmap2.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops) {

        // Compute the difference with the original image (bitmap2) first.
        Difference minDiff = computeDifference(bitmap1, bitmap2);
        // Then go through the list of borderCrops.
        for (Pair<Double, Double> borderCrop : borderCrops) {
            // Compute the difference between bitmap1 and a transformed version of bitmap2.
            Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
            Difference d = computeDifference(bitmap1, bitmap2s);
            // Keep the minimum difference.
            if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
                minDiff = d;
                minDiff.bestMatchBorderCrop = borderCrop;
            }
        }
        return minDiff;
    }

    /**
     * Calls computeMinimumDifference on a default list of border crops.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight) {

        double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
        double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
        double hBorderH = 0.5 * hBorder; // Half-texel horizontal border.
        double vBorderH = 0.5 * vBorder; // Half-texel vertical border.
        // This default list of border crops comes from the behavior of
        // GLConsumer.computeTransformMatrix().
        return computeMinimumDifference(
                bitmap1,
                bitmap2,
                new Pair[] {
                    Pair.create(hBorderH, 0.0),
                    Pair.create(hBorderH, vBorderH),
                    Pair.create(0.0, vBorderH),
                    Pair.create(hBorder, 0.0),
                    Pair.create(hBorder, vBorder),
                    Pair.create(0.0, vBorder)
                });
    }

    /* Describes the difference between two {@link Bitmap} instances. */
    public static final class Difference {

        public final int greatestPixelDifference;
        public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
        public Pair<Double, Double> bestMatchBorderCrop;

        private Difference(int greatestPixelDifference) {
            this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
            this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
                    Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
                Pair<Double, Double> bestMatchBorderCrop) {
            this.greatestPixelDifference = greatestPixelDifference;
            this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
            this.bestMatchBorderCrop = bestMatchBorderCrop;
        }
    }

}

/* Wrapper for MIME types. */
final class MimeTypes {

    private MimeTypes() {}

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    public static boolean isVideo(String mimeType) {
        return mimeType.startsWith("video");
    }

}
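/*
 * Illustrative sketch only (names and the tolerance are hypothetical): how the
 * classes above combine when checking a decoded frame against a golden image.
 *
 *   Bitmap golden = BitmapFactory.decodeStream(context.getAssets().open("golden.png"));
 *   Bitmap decoded = snapshot.getBitmap();  // From a VideoViewSnapshot subclass.
 *   BitmapCompare.Difference diff =
 *           BitmapCompare.computeMinimumDifference(decoded, golden, 480, 360);
 *   boolean similar = diff.greatestPixelDifference <= TOLERANCE;  // Test-defined threshold.
 */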