/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import static org.junit.Assert.assertNotNull;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.res.AssetFileDescriptor;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.cts.R;
import android.opengl.EGL14;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.ParcelFileDescriptor;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.view.PixelCopy;
import android.view.PixelCopy.OnPixelCopyFinishedListener;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;

import androidx.test.rule.ActivityTestRule;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.MediaUtils;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

@TargetApi(16)
public class DecodeAccuracyTestBase {

    protected Context mContext;
    protected Resources mResources;
    protected DecodeAccuracyTestActivity mActivity;
    protected TestHelper testHelper;

    @Rule
    public ActivityTestRule<DecodeAccuracyTestActivity> mActivityRule =
            new ActivityTestRule<>(DecodeAccuracyTestActivity.class);

    @Before
    public void setUp() throws Exception {
        mActivity = mActivityRule.getActivity();
        mContext = mActivity.getApplicationContext();
        mResources = mActivity.getResources();
        testHelper = new TestHelper(mContext, mActivity);
    }

    @After
    public void tearDown() throws Exception {
        mActivity = null;
        mResources = null;
        mContext = null;
        mActivityRule = null;
    }

    protected void bringActivityToFront() {
        Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
        mActivity.startActivity(intent);
    }

    protected TestHelper getHelper() {
        return testHelper;
    }

    public static <T> T checkNotNull(T reference) {
        assertNotNull(reference);
        return reference;
    }

    public static <T> T checkNotNull(String msg, T reference) {
        assertNotNull(msg, reference);
        return reference;
    }

    /* Simple Player that decodes a local video file only. */
    @TargetApi(16)
    static class SimplePlayer {

        public static final long MIN_MS_PER_FRAME = TimeUnit.SECONDS.toMillis(1) / 5; // 5 FPS
        public static final long STARTUP_ALLOW_MS = TimeUnit.SECONDS.toMillis(1);
        public static final int END_OF_STREAM = -1;
        public static final int DEQUEUE_SUCCESS = 1;
        public static final int DEQUEUE_FAIL = 0;

        private static final String TAG = SimplePlayer.class.getSimpleName();
        private static final int NO_TRACK_INDEX = -3;
        private static final long DEQUEUE_TIMEOUT_US = 20;

        private final Context context;
        private final MediaExtractor extractor;
        private final String codecName;
        private MediaCodec decoder;
        private byte[] outputBytes;
        private boolean renderToSurface;
        private MediaCodecList mediaCodecList;
        private Surface surface;

        public SimplePlayer(Context context) {
            this(context, null);
        }

        public SimplePlayer(Context context, String codecName) {
            this.context = checkNotNull(context);
            this.codecName = codecName;
            this.extractor = new MediaExtractor();
            this.renderToSurface = false;
            this.surface = null;
        }
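
        // Illustrative usage only (not part of the test; the variable names here
        // are hypothetical): a caller holding a video Surface and a VideoFormat
        // might decode 30 frames and assert on the outcome like so:
        //
        //   SimplePlayer player = new SimplePlayer(context);
        //   PlayerResult result = player.decodeVideoFrames(surface, videoFormat, 30);
        //   if (!result.isSuccess()) {
        //       fail(result.getFailureMessage());
        //   }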
        /**
         * Plays the given video file for a certain number of frames.
         *
         * @param surface the {@link Surface} that receives the decoder output.
         * @param videoFormat the format of the video to extract and decode.
         * @param numOfTotalFrames the number of frames to play.
         * @param msPerFrameCap the maximum milliseconds per frame; no cap is set if the value
         *                      is less than 1.
         * @param releasePlayer whether to release the player once decoding finishes.
         * @return a {@link PlayerResult} containing the result.
         */
        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap,
                boolean releasePlayer) {
            this.surface = surface;
            PlayerResult playerResult;
            if (prepareVideoDecode(videoFormat)) {
                if (startDecoder()) {
                    final long timeout =
                            Math.max(MIN_MS_PER_FRAME, msPerFrameCap) * numOfTotalFrames
                                    + STARTUP_ALLOW_MS;
                    playerResult = decodeFramesAndPlay(numOfTotalFrames, timeout, msPerFrameCap);
                } else {
                    playerResult = PlayerResult.failToStart();
                }
            } else {
                playerResult = new PlayerResult();
            }
            if (releasePlayer) {
                release();
            }
            return new PlayerResult(playerResult);
        }

        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
            return decodeVideoFrames(surface, videoFormat, numOfTotalFrames, 0, false);
        }

        /**
         * Sets up the extractor and the video decoder with the proper format.
         * This must be called before starting the decoder.
         */
        private boolean prepareVideoDecode(VideoFormat videoFormat) {
            MediaFormat mediaFormat = prepareExtractor(videoFormat);
            if (mediaFormat == null) {
                return false;
            }
            configureVideoFormat(mediaFormat, videoFormat);
            setRenderToSurface(surface != null);
            return createDecoder(mediaFormat) && configureDecoder(surface, mediaFormat);
        }

        /**
         * Sets up the extractor and gets the {@link MediaFormat} of the track.
         */
        private MediaFormat prepareExtractor(VideoFormat videoFormat) {
            if (!setExtractorDataSource(videoFormat)) {
                return null;
            }
            final int trackNum = getFirstTrackIndexByType(videoFormat.getMediaFormat());
            if (trackNum == NO_TRACK_INDEX) {
                return null;
            }
            extractor.selectTrack(trackNum);
            return extractor.getTrackFormat(trackNum);
        }
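
        // For reference, the timeOutMs budget computed in decodeVideoFrames above
        // works out as follows (worked example, not from the source): with no cap
        // (msPerFrameCap = 0) and numOfTotalFrames = 30, the budget is
        // max(200, 0) * 30 + 1000 = 7000 ms, i.e. the 5 FPS floor per frame plus
        // the one-second startup allowance.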
        /**
         * Decodes video frames and renders them to a surface.
         *
         * @param numOfTotalFrames the number of frames to be decoded.
         * @param timeOutMs the time limit for decoding the frames.
         * @param msPerFrameCap the maximum milliseconds per frame; no cap is set if the value
         *                      is less than 1.
         * @return a {@link PlayerResult} containing the result.
         */
        private PlayerResult decodeFramesAndPlay(
                int numOfTotalFrames, long timeOutMs, long msPerFrameCap) {
            int numOfDecodedFrames = 0;
            long firstOutputTimeMs = 0;
            long lastFrameAt = 0;
            final long loopStart = SystemClock.elapsedRealtime();

            while (numOfDecodedFrames < numOfTotalFrames
                    && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
                try {
                    queueDecoderInputBuffer();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in queueDecoderInputBuffer", exception);
                    break;
                }
                try {
                    final int outputResult = dequeueDecoderOutputBuffer();
                    if (outputResult == SimplePlayer.END_OF_STREAM) {
                        break;
                    }
                    if (outputResult == SimplePlayer.DEQUEUE_SUCCESS) {
                        if (firstOutputTimeMs == 0) {
                            firstOutputTimeMs = SystemClock.elapsedRealtime();
                        }
                        if (msPerFrameCap > 0) {
                            // Slow down if the cap is set and not yet reached.
                            final long delayMs =
                                    msPerFrameCap - (SystemClock.elapsedRealtime() - lastFrameAt);
                            if (lastFrameAt != 0 && delayMs > 0) {
                                final long threadDelayMs = 3; // In case of delay in the thread.
                                if (delayMs > threadDelayMs) {
                                    try {
                                        Thread.sleep(delayMs - threadDelayMs);
                                    } catch (InterruptedException ex) { /* */ }
                                }
                                while (SystemClock.elapsedRealtime() - lastFrameAt
                                        < msPerFrameCap) { /* */ }
                            }
                            lastFrameAt = SystemClock.elapsedRealtime();
                        }
                        numOfDecodedFrames++;
                    }
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in dequeueDecoderOutputBuffer", exception);
                }
            }
            // NB: totalTime measures from the first OUTPUT instead of the first INPUT,
            // so it does not include first-frame latency and therefore does not tell
            // us whether the timeout expired.
            final long totalTime = SystemClock.elapsedRealtime() - firstOutputTimeMs;
            return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
        }

        /**
         * Queues the input buffers with data from the media file, one buffer at a time.
         *
         * @return true on success, false otherwise.
         */
        private boolean queueDecoderInputBuffer() {
            ByteBuffer inputBuffer;
            final ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
            final int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
            if (inputBufferIndex >= 0) {
                if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                    inputBuffer = inputBufferArray[inputBufferIndex];
                } else {
                    inputBuffer = decoder.getInputBuffer(inputBufferIndex);
                }
                final int sampleSize = extractor.readSampleData(inputBuffer, 0);
                if (sampleSize > 0) {
                    decoder.queueInputBuffer(
                            inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
                return true;
            }
            return false;
        }
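
        // Note (an observation, not from the source): queueDecoderInputBuffer never
        // queues a buffer with MediaCodec.BUFFER_FLAG_END_OF_STREAM when the
        // extractor runs dry, so in practice the decode loop above exits via the
        // frame count or the timeout. A conventional explicit end-of-stream signal
        // would look like:
        //
        //   decoder.queueInputBuffer(inputBufferIndex, 0, 0, 0L,
        //           MediaCodec.BUFFER_FLAG_END_OF_STREAM);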
        /**
         * Dequeues one output buffer.
         * For a video decoder, renders to the surface if one is provided.
         * For an audio decoder, copies the bytes out of the output buffer.
         *
         * @return an integer indicating the status (fail, success, or end of stream).
         */
        private int dequeueDecoderOutputBuffer() {
            final BufferInfo info = new BufferInfo();
            final int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                return END_OF_STREAM;
            }
            if (decoderStatus >= 0) {
                // On devices older than JELLY_BEAN_MR2, when rendering to a surface,
                // info.size seems to always be 0 even if the decoder successfully
                // decoded the frame, so only treat an empty buffer as a failure on
                // JELLY_BEAN_MR2 and later.
                if (info.size <= 0 && ApiLevelUtil.isAtLeast(Build.VERSION_CODES.JELLY_BEAN_MR2)) {
                    return DEQUEUE_FAIL;
                }
                if (!renderToSurface) {
                    ByteBuffer outputBuffer;
                    if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                        outputBuffer = decoder.getOutputBuffers()[decoderStatus];
                    } else {
                        outputBuffer = decoder.getOutputBuffer(decoderStatus);
                    }
                    outputBytes = new byte[info.size];
                    outputBuffer.get(outputBytes);
                    outputBuffer.clear();
                }
                decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
                return DEQUEUE_SUCCESS;
            }
            return DEQUEUE_FAIL;
        }

        public void release() {
            decoderRelease();
            extractorRelease();
        }

        private boolean setExtractorDataSource(VideoFormat videoFormat) {
            checkNotNull(videoFormat);
            try {
                final AssetFileDescriptor afd = videoFormat.getAssetFileDescriptor();
                extractor.setDataSource(
                        afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
                afd.close();
            } catch (IOException exception) {
                Log.e(TAG, "IOException in setDataSource", exception);
                return false;
            }
            return true;
        }
        /**
         * Creates a decoder based on conditions.
         *
         * <p>If a codec name is provided, {@link MediaCodec#createByCodecName(String)} is used.
         * If no codec name is provided, {@link MediaCodecList#findDecoderForFormat(MediaFormat)}
         * is preferred on LOLLIPOP and up for finding the name of a codec that supports the
         * media format. On OS versions older than LOLLIPOP,
         * {@link MediaCodec#createDecoderByType(String)} is used.
         */
        private boolean createDecoder(MediaFormat mediaFormat) {
            try {
                if (codecName != null) {
                    decoder = MediaCodec.createByCodecName(codecName);
                } else if (ApiLevelUtil.isAtLeast(Build.VERSION_CODES.LOLLIPOP)) {
                    if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
                        // On LOLLIPOP, the format must not contain a frame rate.
                        mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
                    }
                    if (mediaCodecList == null) {
                        mediaCodecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
                    }
                    decoder = MediaCodec.createByCodecName(
                            mediaCodecList.findDecoderForFormat(mediaFormat));
                } else {
                    decoder = MediaCodec.createDecoderByType(
                            mediaFormat.getString(MediaFormat.KEY_MIME));
                }
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder creation", exception);
                decoderRelease();
                return false;
            }
            return true;
        }

        private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
            try {
                decoder.configure(mediaFormat, surface, null, 0);
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder configuration", exception);
                try {
                    decoder.reset();
                } catch (Exception resetException) {
                    Log.e(TAG, "Exception during decoder reset", resetException);
                }
                decoderRelease();
                return false;
            }
            return true;
        }

        private void setRenderToSurface(boolean render) {
            this.renderToSurface = render;
        }

        private boolean startDecoder() {
            try {
                decoder.start();
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder start", exception);
                decoder.reset();
                decoderRelease();
                return false;
            }
            return true;
        }

        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                decoder.reset();
                // IllegalStateException happens when the decoder fails to start.
                Log.e(TAG, "IllegalStateException during decoder stop", exception);
            } finally {
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException during decoder release", exception);
                }
                decoder = null;
            }
        }

        private void extractorRelease() {
            if (extractor == null) {
                return;
            }
            try {
                extractor.release();
            } catch (IllegalStateException exception) {
                Log.e(TAG, "IllegalStateException during extractor release", exception);
            }
        }

        private static void configureVideoFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());
            if (ApiLevelUtil.isBefore(Build.VERSION_CODES.KITKAT)) {
                return;
            }
            // Set KEY_MAX_WIDTH and KEY_MAX_HEIGHT when isAbrEnabled() is set.
            if (videoFormat.isAbrEnabled()) {
                try {
                    // Check for the max resolution supported by the codec.
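                    // Worked example (illustrative, not from the source): if the codec
                    // reports a maximum supported size of 1920x1080 and the video is
                    // 1280x720, KEY_MAX_WIDTH/KEY_MAX_HEIGHT are set to 1920/1080 below;
                    // if the capability lookup throws, or the reported maximum is smaller
                    // than the video, both keys fall back to the video's own size.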
                    final MediaCodec decoder = MediaUtils.getDecoder(mediaFormat);
                    final VideoCapabilities videoCapabilities = MediaUtils.getVideoCapabilities(
                            decoder.getName(), videoFormat.getMimeType());
                    decoder.release();
                    final int maxWidth = videoCapabilities.getSupportedWidths().getUpper();
                    final int maxHeight =
                            videoCapabilities.getSupportedHeightsFor(maxWidth).getUpper();
                    if (maxWidth >= videoFormat.getWidth() && maxHeight >= videoFormat.getHeight()) {
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, maxWidth);
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, maxHeight);
                        return;
                    }
                } catch (NullPointerException exception) { /* */ }
                // Set max width/height to the current size if the codec's max supported
                // width/height cannot be retrieved or the max is not greater than the
                // current size.
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getHeight());
            }
        }

        /**
         * Returns the index of the first track matching the given media type,
         * e.g. "video" matches a track whose MIME type starts with "video/".
         */
        private int getFirstTrackIndexByType(String format) {
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
                if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith(format + "/")) {
                    return i;
                }
            }
            Log.e(TAG, "couldn't get a " + format + " track");
            return NO_TRACK_INDEX;
        }

        /**
         * Stores the result from {@link SimplePlayer}.
         */
        public static final class PlayerResult {

            public static final int UNSET = -1;
            private final boolean configureSuccess;
            private final boolean startSuccess;
            private final boolean decodeSuccess;
            private final long totalTime;

            public PlayerResult(
                    boolean configureSuccess, boolean startSuccess,
                    boolean decodeSuccess, long totalTime) {
                this.configureSuccess = configureSuccess;
                this.startSuccess = startSuccess;
                this.decodeSuccess = decodeSuccess;
                this.totalTime = totalTime;
            }

            public PlayerResult(PlayerResult playerResult) {
                this(playerResult.configureSuccess, playerResult.startSuccess,
                        playerResult.decodeSuccess, playerResult.totalTime);
            }

            public PlayerResult() {
                // Fake PlayerResult.
                this(false, false, false, UNSET);
            }

            public static PlayerResult failToStart() {
                return new PlayerResult(true, false, false, UNSET);
            }

            public String getFailureMessage() {
                if (!configureSuccess) {
                    return "Failed to configure decoder.";
                } else if (!startSuccess) {
                    return "Failed to start decoder.";
                } else if (!decodeSuccess) {
                    return "Failed to decode the expected number of frames.";
                } else {
                    return "Failed to finish decoding.";
                }
            }

            public boolean isConfigureSuccess() {
                return configureSuccess;
            }

            public boolean isSuccess() {
                return configureSuccess && startSuccess && decodeSuccess && getTotalTime() != UNSET;
            }

            public long getTotalTime() {
                return totalTime;
            }

        }

    }

    /* Utility class for collecting common test case functionality. */
    class TestHelper {

        private final String TAG = TestHelper.class.getSimpleName();

        private final Context context;
        private final Handler handler;
        private final Activity activity;

        public TestHelper(Context context, Activity activity) {
            this.context = checkNotNull(context);
            this.handler = new Handler(Looper.getMainLooper());
            this.activity = activity;
        }

        public Bitmap generateBitmapFromImageResourceId(int resourceId) {
            return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId));
        }

        public Context getContext() {
            return context;
        }

        public void rotateOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    final int orientation = context.getResources().getConfiguration().orientation;
                    if (orientation == Configuration.ORIENTATION_PORTRAIT) {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                }
            });
        }

        public void unsetOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
                }
            });
        }

        public void generateView(View view) {
            RelativeLayout relativeLayout =
                    (RelativeLayout) activity.findViewById(R.id.attach_view);
            ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
            handler.post(viewGenerator);
        }

        public void cleanUpView(View view) {
            ViewCleaner viewCleaner = new ViewCleaner(view);
            handler.post(viewCleaner);
        }

        public Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
            handler.post(snapshot);
            synchronized (snapshot.getSyncObject()) {
                try {
                    snapshot.getSyncObject().wait(VideoViewSnapshot.SNAPSHOT_TIMEOUT_MS + 100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    Log.e(TAG, "Unable to finish generateBitmapFromVideoViewSnapshot().");
                    return null;
                }
            }
            if (!snapshot.isBitmapReady()) {
                Log.e(TAG, "Time out in generateBitmapFromVideoViewSnapshot().");
                return null;
            }
            return snapshot.getBitmap();
        }
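
        // How the snapshot handshake above works (a summary of the code, with a
        // hypothetical caller): the snapshot runnable is posted to the UI thread,
        // the test thread blocks on the snapshot's sync object for up to
        // SNAPSHOT_TIMEOUT_MS + 100 ms, and the runnable calls notify() on that
        // same object once the bitmap is ready (or the capture failed):
        //
        //   Bitmap bitmap = getHelper().generateBitmapFromVideoViewSnapshot(
        //           videoViewFactory.getVideoViewSnapshot());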
        private class ViewGenerator implements Runnable {

            private final View view;
            private final RelativeLayout relativeLayout;

            public ViewGenerator(RelativeLayout relativeLayout, View view) {
                this.view = checkNotNull(view);
                this.relativeLayout = checkNotNull(relativeLayout);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                        VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
                view.setLayoutParams(params);
                relativeLayout.addView(view);
            }

        }

        private class ViewCleaner implements Runnable {

            private final View view;

            public ViewCleaner(View view) {
                this.view = checkNotNull(view);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
            }

        }

    }

}

/* Factory for manipulating a {@link View}. */
abstract class VideoViewFactory {

    public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
    public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
    public static final int VIEW_WIDTH = 480;
    public static final int VIEW_HEIGHT = 360;

    public VideoViewFactory() {}

    public abstract void release();

    public abstract String getName();

    public abstract View createView(Context context);

    public void waitForViewIsAvailable() throws Exception {
        waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
    }

    public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;

    public abstract Surface getSurface();

    public abstract VideoViewSnapshot getVideoViewSnapshot();

    public boolean hasLooper() {
        return Looper.myLooper() != null;
    }

}

/* Factory for building a {@link TextureView}. */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    @Override
    public TextureView createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!textureView.isAvailable()) {
            throw new InterruptedException("Taking too long to attach a TextureView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }
    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}

/**
 * Factory for building a {@link SurfaceView}.
 */
@TargetApi(24)
class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {

    private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "SurfaceView";
    private final Object syncToken = new Object();

    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    public SurfaceViewFactory() {}

    @Override
    public void release() {
        surfaceView = null;
        surfaceHolder = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        if (!super.hasLooper()) {
            Looper.prepare();
        }
        surfaceView = new SurfaceView(context);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        return surfaceView;
    }

    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!getSurface().isValid()) {
            throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public Surface getSurface() {
        return surfaceHolder == null ? null : surfaceHolder.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}

}
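
// For orientation (a summary of the code, not new behavior): each factory pairs a
// rendering target with a capture strategy. TextureViewFactory reads pixels back
// with TextureView#getBitmap, SurfaceViewFactory uses the PixelCopy API, and
// GLSurfaceViewFactory below renders into an offscreen EGL pbuffer and reads it
// back with glReadPixels.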
904 */ 905 @TargetApi(16) 906 class GLSurfaceViewFactory extends VideoViewFactory { 907 908 private static final String TAG = GLSurfaceViewFactory.class.getSimpleName(); 909 private static final String NAME = "GLSurfaceView"; 910 911 private final Object surfaceSyncToken = new Object(); 912 913 private GLSurfaceViewThread glSurfaceViewThread; 914 private boolean byteBufferIsReady = false; 915 GLSurfaceViewFactory()916 public GLSurfaceViewFactory() {} 917 918 @Override release()919 public void release() { 920 glSurfaceViewThread.release(); 921 glSurfaceViewThread = null; 922 } 923 924 @Override getName()925 public String getName() { 926 return NAME; 927 } 928 929 @Override createView(Context context)930 public View createView(Context context) { 931 Log.i(TAG, "Creating a " + NAME); 932 // Do all GL rendering in the GL thread. 933 glSurfaceViewThread = new GLSurfaceViewThread(); 934 glSurfaceViewThread.start(); 935 // No necessary view to display, return null. 936 return null; 937 } 938 939 @Override waitForViewIsAvailable(long timeOutMs)940 public void waitForViewIsAvailable(long timeOutMs) throws Exception { 941 final long start = SystemClock.elapsedRealtime(); 942 while (SystemClock.elapsedRealtime() - start < timeOutMs 943 && glSurfaceViewThread.getSurface() == null) { 944 synchronized (surfaceSyncToken) { 945 try { 946 surfaceSyncToken.wait(VIEW_WAITTIME_MS); 947 } catch (InterruptedException e) { 948 Log.e(TAG, "Exception occurred when waiting for the surface from" 949 + " GLSurfaceView to become available.", e); 950 throw new InterruptedException(e.getMessage()); 951 } 952 } 953 } 954 if (glSurfaceViewThread.getSurface() == null) { 955 throw new InterruptedException("Taking too long for the surface from" 956 + " GLSurfaceView to become available."); 957 } 958 Log.i(TAG, NAME + " is available."); 959 } 960 961 @Override getSurface()962 public Surface getSurface() { 963 return glSurfaceViewThread.getSurface(); 964 } 965 966 @Override getVideoViewSnapshot()967 public VideoViewSnapshot getVideoViewSnapshot() { 968 return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT); 969 } 970 byteBufferIsReady()971 public boolean byteBufferIsReady() { 972 return byteBufferIsReady; 973 } 974 getByteBuffer()975 public ByteBuffer getByteBuffer() { 976 return glSurfaceViewThread.getByteBuffer(); 977 } 978 979 /* Does all GL operations. */ 980 private class GLSurfaceViewThread extends Thread 981 implements SurfaceTexture.OnFrameAvailableListener { 982 983 private static final int FLOAT_SIZE_BYTES = 4; 984 private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; 985 private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; 986 private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; 987 private FloatBuffer triangleVertices; 988 private float[] textureTransform = new float[16]; 989 990 private float[] triangleVerticesData = { 991 // X, Y, Z, U, V 992 -1f, -1f, 0f, 0f, 1f, 993 1f, -1f, 0f, 1f, 1f, 994 -1f, 1f, 0f, 0f, 0f, 995 1f, 1f, 0f, 1f, 0f, 996 }; 997 // Make the top-left corner corresponds to texture coordinate 998 // (0, 0). This complies with the transformation matrix obtained from 999 // SurfaceTexture.getTransformMatrix. 
        private static final String VERTEX_SHADER =
                "attribute vec4 aPosition;\n"
                + "attribute vec4 aTextureCoord;\n"
                + "uniform mat4 uTextureTransform;\n"
                + "varying vec2 vTextureCoord;\n"
                + "void main() {\n"
                + " gl_Position = aPosition;\n"
                + " vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
                + "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n"
                + "precision mediump float;\n" // highp here doesn't seem to matter
                + "varying vec2 vTextureCoord;\n"
                + "uniform samplerExternalOES sTexture;\n"
                + "void main() {\n"
                + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                + "}\n";

        private int glProgram;
        private int textureID = -1;
        private int aPositionHandle;
        private int aTextureHandle;
        private int uTextureTransformHandle;
        private EGLDisplay eglDisplay = null;
        private EGLContext eglContext = null;
        private EGLSurface eglSurface = null;
        private EGL10 egl10;
        private Surface surface = null;
        private SurfaceTexture surfaceTexture;
        private ByteBuffer byteBuffer;
        private Looper looper;

        public GLSurfaceViewThread() {}

        @Override
        public void run() {
            Looper.prepare();
            looper = Looper.myLooper();
            triangleVertices = ByteBuffer
                    .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            triangleVertices.put(triangleVerticesData).position(0);

            eglSetup();
            makeCurrent();
            eglSurfaceCreated();

            surfaceTexture = new SurfaceTexture(getTextureId());
            surfaceTexture.setOnFrameAvailableListener(this);
            surface = new Surface(surfaceTexture);
            synchronized (surfaceSyncToken) {
                surfaceSyncToken.notify();
            }
            // Store pixels from the surface.
            byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
            byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
            Looper.loop();
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            checkGlError("before updateTexImage");
            surfaceTexture.updateTexImage();
            st.getTransformMatrix(textureTransform);
            drawFrame();
            saveFrame();
        }

        /* Prepares EGL with a GLES 2.0 context and a surface that supports pbuffer. */
        public void eglSetup() {
            egl10 = (EGL10) EGLContext.getEGL();
            eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
            if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get egl10 display");
            }
            int[] version = new int[2];
            if (!egl10.eglInitialize(eglDisplay, version)) {
                eglDisplay = null;
                throw new RuntimeException("unable to initialize egl10");
            }
            // Configure EGL for pbuffer and OpenGL ES 2.0 with 8 bits per RGBA channel.
            int[] configAttribs = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            if (!egl10.eglChooseConfig(
                    eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
                throw new RuntimeException("unable to find RGBA8888 pbuffer ES2 EGL config");
            }
            // Configure the EGL context for OpenGL ES 2.0.
            int[] contextAttribs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
            };
            eglContext = egl10.eglCreateContext(
                    eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
            checkEglError("eglCreateContext");
            if (eglContext == null) {
                throw new RuntimeException("null context");
            }
            // Create a pbuffer surface.
            int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, VIEW_WIDTH,
                EGL10.EGL_HEIGHT, VIEW_HEIGHT,
                EGL10.EGL_NONE
            };
            eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
            checkEglError("eglCreatePbufferSurface");
            if (eglSurface == null) {
                throw new RuntimeException("surface was null");
            }
        }

        public void release() {
            looper.quit();
            surface.release();
            surfaceTexture.release();
            byteBufferIsReady = false;
            byteBuffer = null;
            if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
                egl10.eglMakeCurrent(eglDisplay,
                        EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
                egl10.eglDestroySurface(eglDisplay, eglSurface);
                egl10.eglDestroyContext(eglDisplay, eglContext);
                // TODO: uncomment the following line after fixing the crash in the GL
                // driver libGLESv2_adreno.so; see b/123755902.
                // egl10.eglTerminate(eglDisplay);
            }
            eglDisplay = EGL10.EGL_NO_DISPLAY;
            eglContext = EGL10.EGL_NO_CONTEXT;
            eglSurface = EGL10.EGL_NO_SURFACE;
        }

        /* Makes our EGL context and surface current. */
        public void makeCurrent() {
            if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
            checkEglError("eglMakeCurrent");
        }
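
        // Design note (restating what the setup above does): rendering goes to a
        // VIEW_WIDTH x VIEW_HEIGHT (480x360) offscreen pbuffer rather than a window
        // surface, so decoded frames can be captured with glReadPixels in saveFrame()
        // without ever attaching a view to the window hierarchy.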
        /* Call this after the EGL surface is created and made current. */
        public void eglSurfaceCreated() {
            glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            if (glProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
            aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
            checkLocation(aPositionHandle, "aPosition");
            aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
            checkLocation(aTextureHandle, "aTextureCoord");
            uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
            checkLocation(uTextureTransformHandle, "uTextureTransform");

            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            checkGlError("glGenTextures");
            textureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        public void drawFrame() {
            GLES20.glUseProgram(glProgram);
            checkGlError("glUseProgram");
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            checkGlError("glActiveTexture");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aPositionHandle");
            GLES20.glEnableVertexAttribArray(aPositionHandle);
            checkGlError("glEnableVertexAttribArray aPositionHandle");

            triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aTextureHandle");
            GLES20.glEnableVertexAttribArray(aTextureHandle);
            checkGlError("glEnableVertexAttribArray aTextureHandle");

            GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false, textureTransform, 0);
            checkGlError("glUniformMatrix uTextureTransformHandle");

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }
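
        // Note (an observation, not from the source): byteBufferIsReady is a plain,
        // non-volatile boolean written on this GL thread in saveFrame() below and
        // polled from the test thread in GLSurfaceViewSnapshot.waitForByteBuffer().
        // The polling loop's sleeps make this work in practice, though a volatile
        // flag would make the cross-thread visibility explicit.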
        /* Reads the pixels into a ByteBuffer. */
        public void saveFrame() {
            byteBufferIsReady = false;
            byteBuffer.clear();
            GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
                    GLES20.GL_UNSIGNED_BYTE, byteBuffer);
            byteBufferIsReady = true;
        }

        public int getTextureId() {
            return textureID;
        }

        public Surface getSurface() {
            return surface;
        }

        public ByteBuffer getByteBuffer() {
            return byteBuffer;
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);

            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }
            int program = GLES20.glCreateProgram();
            if (program == 0) {
                Log.e(TAG, "Could not create program");
                return 0;
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);

            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        private void checkEglError(String msg) {
            int error;
            if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }

        public void checkGlError(String op) {
            int error;
            if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

        public void checkLocation(int location, String label) {
            if (location < 0) {
                throw new RuntimeException("Unable to locate '" + label + "' in program");
            }
        }
    }

}

/* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    public static final long SNAPSHOT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(30);
    public static final long SLEEP_TIME_MS = 30;
    public static final Object SYNC_TOKEN = new Object();

    public abstract Bitmap getBitmap();

    public abstract boolean isBitmapReady();

    public abstract Object getSyncObject();

}

/* Runnable to get a bitmap from a texture view on the UI thread via a handler.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class TextureViewSnapshot extends VideoViewSnapshot {

    private final TextureView tv;
    private Bitmap bitmap = null;

    public TextureViewSnapshot(TextureView tv) {
        this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
    }

    @Override
    public void run() {
        bitmap = null;
        bitmap = tv.getBitmap();
        synchronized (SYNC_TOKEN) {
            SYNC_TOKEN.notify();
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

}

/**
 * Snapshot that gets the bitmap of a {@link SurfaceView}.
 * Note that {@link PixelCopy} does not have to be called in a runnable.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class SurfaceViewSnapshot extends VideoViewSnapshot {

    private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
    private static final int PIXELCOPY_TIMEOUT_MS = 1000;
    private static final int INITIAL_STATE = -1;

    private final SurfaceView surfaceView;
    private final int width;
    private final int height;

    private Bitmap bitmap;
    private int copyResult;

    public SurfaceViewSnapshot(SurfaceView surfaceView, int width, int height) {
        this.surfaceView = surfaceView;
        this.width = width;
        this.height = height;
        this.copyResult = INITIAL_STATE;
        this.bitmap = null;
    }

    @Override
    public void run() {
        final long start = SystemClock.elapsedRealtime();
        copyResult = INITIAL_STATE;
        final SynchronousPixelCopy copyHelper = new SynchronousPixelCopy();
        bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
        try {
            // Wait for PixelCopy to finish.
            while ((copyResult = copyHelper.request(surfaceView, bitmap)) != PixelCopy.SUCCESS
                    && (SystemClock.elapsedRealtime() - start) < SNAPSHOT_TIMEOUT_MS) {
                Thread.sleep(SLEEP_TIME_MS);
            }
        } catch (InterruptedException e) {
            Log.e(TAG, "Pixel copy was stopped/interrupted before it finished.", e);
            bitmap = null;
        } finally {
            copyHelper.release();
            synchronized (SYNC_TOKEN) {
                SYNC_TOKEN.notify();
            }
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null && copyResult == PixelCopy.SUCCESS;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {

        private final Handler handler;
        private final HandlerThread thread;

        private int status = INITIAL_STATE;

        public SynchronousPixelCopy() {
            this.thread = new HandlerThread("PixelCopyHelper");
            thread.start();
            this.handler = new Handler(thread.getLooper());
        }

        public void release() {
            if (thread.isAlive()) {
                thread.quit();
            }
        }

        public int request(SurfaceView source, Bitmap dest) {
            synchronized (this) {
                try {
                    PixelCopy.request(source, dest, this, handler);
                    return getResultLocked();
                } catch (Exception e) {
                    Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
                    return -1;
                }
            }
        }

        private int getResultLocked() {
            try {
                this.wait(PIXELCOPY_TIMEOUT_MS);
            } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s. */ }
            return status;
        }

        @Override
        public void onPixelCopyFinished(int copyResult) {
            synchronized (this) {
                status = copyResult;
                this.notify();
            }
        }

    }

}
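
// The SynchronousPixelCopy helper above is the usual pattern for making the
// asynchronous PixelCopy API synchronous (a summary of the code, not new
// behavior): requests are dispatched to a dedicated HandlerThread, and the
// caller blocks on the helper's monitor until onPixelCopyFinished() delivers a
// status, or the one-second wait lapses and the last recorded status is
// returned as-is.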
/**
 * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
 * Note that because of how the bitmap is captured in GLSurfaceView,
 * this class does not have to be a runnable.
 * This class is to be used together with
 * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}.
 */
class GLSurfaceViewSnapshot extends VideoViewSnapshot {

    private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();

    private final GLSurfaceViewFactory glSurfaceViewFactory;
    private final int width;
    private final int height;

    private Bitmap bitmap = null;
    private boolean bitmapIsReady = false;

    public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
        this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
        this.width = width;
        this.height = height;
    }

    @Override
    public void run() {
        bitmapIsReady = false;
        bitmap = null;
        try {
            waitForByteBuffer();
        } catch (InterruptedException exception) {
            Log.e(TAG, exception.getMessage());
            bitmap = null;
            notifyObject();
            return;
        }
        try {
            final ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
            bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            byteBuffer.rewind();
            bitmap.copyPixelsFromBuffer(byteBuffer);
            bitmapIsReady = true;
            byteBuffer.clear();
        } catch (NullPointerException exception) {
            Log.e(TAG, "glSurfaceViewFactory or byteBuffer may have been released", exception);
            bitmap = null;
        } finally {
            notifyObject();
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmapIsReady;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    private void notifyObject() {
        synchronized (SYNC_TOKEN) {
            SYNC_TOKEN.notify();
        }
    }

    private void waitForByteBuffer() throws InterruptedException {
        // Wait for the byte buffer to be ready.
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < SNAPSHOT_TIMEOUT_MS) {
            if (glSurfaceViewFactory.byteBufferIsReady()) {
                return;
            }
            Thread.sleep(SLEEP_TIME_MS);
        }
        throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
    }

}
/* Stores information about a video file. */
class VideoFormat {

    public static final String STRING_UNSET = "UNSET";
    public static final int INT_UNSET = -1;

    private final String filename;

    private String mimeType = STRING_UNSET;
    private int width = INT_UNSET;
    private int height = INT_UNSET;
    private int maxWidth = INT_UNSET;
    private int maxHeight = INT_UNSET;
    private FilenameParser filenameParser;

    public VideoFormat(String filename) {
        this.filename = filename;
    }

    public VideoFormat(VideoFormat videoFormat) {
        this(videoFormat.filename);
    }

    private FilenameParser getParsedName() {
        if (filenameParser == null) {
            filenameParser = new FilenameParser(filename);
        }
        return filenameParser;
    }

    public String getMediaFormat() {
        return "video";
    }

    public void setMimeType(String mimeType) {
        this.mimeType = mimeType;
    }

    public String getMimeType() {
        if (mimeType.equals(STRING_UNSET)) {
            return getParsedName().getMimeType();
        }
        return mimeType;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public void setMaxWidth(int maxWidth) {
        this.maxWidth = maxWidth;
    }

    public int getWidth() {
        if (width == INT_UNSET) {
            return getParsedName().getWidth();
        }
        return width;
    }

    public int getMaxWidth() {
        return maxWidth;
    }

    public int getOriginalWidth() {
        return getParsedName().getWidth();
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public void setMaxHeight(int maxHeight) {
        this.maxHeight = maxHeight;
    }

    public int getHeight() {
        if (height == INT_UNSET) {
            return getParsedName().getHeight();
        }
        return height;
    }

    public int getMaxHeight() {
        return maxHeight;
    }

    public int getOriginalHeight() {
        return getParsedName().getHeight();
    }

    public boolean isAbrEnabled() {
        return false;
    }

    public String getOriginalSize() {
        if (width == INT_UNSET || height == INT_UNSET) {
            return getParsedName().getSize();
        }
        return width + "x" + height;
    }

    public String getDescription() {
        return getParsedName().getDescription();
    }

    public String toPrettyString() {
        return getParsedName().toPrettyString();
    }

    public AssetFileDescriptor getAssetFileDescriptor() throws FileNotFoundException {
        File inpFile = new File(WorkDir.getMediaDirString() + "assets/decode_accuracy/" + filename);
        ParcelFileDescriptor parcelFD =
                ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY);
        return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize());
    }

}

/* File parser for filenames in the format {description}-{codec}_{size}_{framerate}.{format}. */
class FilenameParser {

    static final String VP9 = "vp9";
    static final String H264 = "h264";

    private final String filename;
/* File parser for filenames of the form {description}-{codec}_{size}_{framerate}.{format}. */
class FilenameParser {

    static final String VP9 = "vp9";
    static final String H264 = "h264";

    private final String filename;

    private String codec = VideoFormat.STRING_UNSET;
    private String description = VideoFormat.STRING_UNSET;
    private int width = VideoFormat.INT_UNSET;
    private int height = VideoFormat.INT_UNSET;

    FilenameParser(String filename) {
        this.filename = filename;
        parseFilename(filename);
    }

    public String getCodec() {
        return codec;
    }

    public String getMimeType() {
        switch (codec) {
            case H264:
                return MimeTypes.VIDEO_H264;
            case VP9:
                return MimeTypes.VIDEO_VP9;
            default:
                return null;
        }
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    public String getSize() {
        return width + "x" + height;
    }

    public String getDescription() {
        return description;
    }

    String toPrettyString() {
        // The codec field is never null; it stays UNSET when parsing fails.
        if (!codec.equals(VideoFormat.STRING_UNSET)) {
            return codec.toUpperCase() + " " + getSize();
        }
        return filename;
    }

    private void parseFilename(String filename) {
        final String descriptionDelimiter = "-";
        final String infoDelimiter = "_";
        final String sizeDelimiter = "x";
        try {
            this.description = filename.split(descriptionDelimiter)[0];
            final String[] fileInfo = filename.split(descriptionDelimiter)[1].split(infoDelimiter);
            this.codec = fileInfo[0];
            this.width = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[0]);
            this.height = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[1]);
        } catch (Exception exception) { /* Filename format does not match. */ }
    }

}
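/*
 * Parsing sketch (hypothetical filenames): a well-formed name yields the codec and size,
 * while a name that does not match the pattern leaves every field UNSET, so
 * toPrettyString() falls back to the raw filename.
 *
 *     new FilenameParser("swirl-h264_1920x1080_30fps.mp4").toPrettyString(); // "H264 1920x1080"
 *     new FilenameParser("README.txt").toPrettyString();                    // "README.txt"
 */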
/**
 * Compares bitmaps to determine whether they are similar.
 *
 * <p>To determine the greatest pixel difference, we transform each pixel into the
 * CIE L*a*b* color space and use the Euclidean distance formula to measure pixel differences.
 */
class BitmapCompare {

    private static final int RED = 0;
    private static final int GREEN = 1;
    private static final int BLUE = 2;
    private static final int X = 0;
    private static final int Y = 1;
    private static final int Z = 2;

    private BitmapCompare() {}

    /**
     * Produces the greatest pixel difference between two bitmaps. Used to determine
     * bitmap similarity.
     *
     * @param bitmap1 A bitmap to compare to bitmap2.
     * @param bitmap2 A bitmap to compare to bitmap1.
     * @return A {@link Difference} with an integer describing the greatest pixel difference,
     *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
     *     {@link Pair} of the (col, row) pixel coordinate where it was first found.
     */
    @TargetApi(12)
    public static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
        if (bitmap1 == null || bitmap2 == null) {
            return new Difference(Integer.MAX_VALUE);
        }
        if (bitmap1.equals(bitmap2) || bitmap1.sameAs(bitmap2)) {
            return new Difference(0);
        }
        if (bitmap1.getHeight() != bitmap2.getHeight()
                || bitmap1.getWidth() != bitmap2.getWidth()) {
            return new Difference(Integer.MAX_VALUE);
        }
        // Convert all pixels to the CIE L*a*b* color space so we can do a direct color
        // comparison using the Euclidean distance formula.
        final double[][] pixels1 = convertRgbToCieLab(bitmap1);
        final double[][] pixels2 = convertRgbToCieLab(bitmap2);
        int greatestDifference = 0;
        int greatestDifferenceIndex = -1;
        for (int i = 0; i < pixels1.length; i++) {
            final int difference = euclideanDistance(pixels1[i], pixels2[i]);
            if (difference > greatestDifference) {
                greatestDifference = difference;
                greatestDifferenceIndex = i;
            }
        }
        return new Difference(greatestDifference, Pair.create(
                greatestDifferenceIndex % bitmap1.getWidth(),
                greatestDifferenceIndex / bitmap1.getWidth()));
    }

    @SuppressLint("UseSparseArrays")
    private static double[][] convertRgbToCieLab(Bitmap bitmap) {
        // Cache the transform for each distinct ARGB value to avoid recomputing it per pixel.
        final HashMap<Integer, double[]> pixelTransformCache = new HashMap<>();
        final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
        final int[] pixels = new int[bitmap.getHeight() * bitmap.getWidth()];
        bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
        for (int i = 0; i < pixels.length; i++) {
            final double[] transformedColor = pixelTransformCache.get(pixels[i]);
            if (transformedColor != null) {
                result[i] = transformedColor;
            } else {
                result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
                pixelTransformCache.put(pixels[i], result[i]);
            }
        }
        return result;
    }
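    /*
     * Worked example (hypothetical bitmaps of the same size): comparing a solid red bitmap
     * against a solid blue one gives every pixel the same L*a*b* distance, roughly 176, so
     * that distance is reported at the first pixel, coordinate (0, 0).
     *
     *     Difference diff = BitmapCompare.computeDifference(redBitmap, blueBitmap);
     *     diff.greatestPixelDifference;            // ~176 for pure red vs. pure blue
     *     diff.greatestPixelDifferenceCoordinates; // (0, 0)
     */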
    /**
     * Conversion from RGB to XYZ, based on the algorithm defined at:
     * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
     *
     * <p><pre>{@code
     * var_R = ( R / 255 ) //R from 0 to 255
     * var_G = ( G / 255 ) //G from 0 to 255
     * var_B = ( B / 255 ) //B from 0 to 255
     *
     * if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
     * else var_R = var_R / 12.92
     * if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
     * else var_G = var_G / 12.92
     * if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
     * else var_B = var_B / 12.92
     *
     * var_R = var_R * 100
     * var_G = var_G * 100
     * var_B = var_B * 100
     *
     * // Observer. = 2°, Illuminant = D65
     * X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
     * Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
     * Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
     * }</pre>
     *
     * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
     * @return An array of doubles where each value is a component of the XYZ color space.
     */
    private static double[] convertRgbToXyz(int rgbColor) {
        final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
        for (int i = 0; i < comp.length; i++) {
            comp[i] /= 255.0;
            if (comp[i] > 0.04045) {
                comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
            } else {
                comp[i] /= 12.92;
            }
            comp[i] *= 100;
        }
        final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
        final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
        final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
        return new double[] {x, y, z};
    }

    /**
     * Conversion from XYZ to CIE-L*a*b*, based on the algorithm defined at:
     * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
     *
     * <p><pre>{@code
     * var_X = X / ref_X //ref_X = 95.047  Observer= 2°, Illuminant= D65
     * var_Y = Y / ref_Y //ref_Y = 100.000
     * var_Z = Z / ref_Z //ref_Z = 108.883
     *
     * if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
     * else var_X = ( 7.787 * var_X ) + ( 16 / 116 )
     * if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
     * else var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
     * if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
     * else var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
     *
     * CIE-L* = ( 116 * var_Y ) - 16
     * CIE-a* = 500 * ( var_X - var_Y )
     * CIE-b* = 200 * ( var_Y - var_Z )
     * }</pre>
     *
     * @param comp An array of doubles where each value is a component of the XYZ color space.
     * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
     */
    private static double[] convertXyzToCieLab(double[] comp) {
        // Note: the input array is mutated in place to hold the normalized components.
        comp[X] /= 95.047;
        comp[Y] /= 100.0;
        comp[Z] /= 108.883;
        for (int i = 0; i < comp.length; i++) {
            if (comp[i] > 0.008856) {
                comp[i] = Math.pow(comp[i], (1.0 / 3.0));
            } else {
                comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
            }
        }
        final double l = (116 * comp[Y]) - 16;
        final double a = 500 * (comp[X] - comp[Y]);
        final double b = 200 * (comp[Y] - comp[Z]);
        return new double[] {l, a, b};
    }

    private static int euclideanDistance(double[] p1, double[] p2) {
        if (p1.length != p2.length) {
            return Integer.MAX_VALUE;
        }
        double result = 0;
        for (int i = 0; i < p1.length; i++) {
            result += Math.pow(p1[i] - p2[i], 2);
        }
        return (int) Math.round(Math.sqrt(result));
    }
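    /*
     * Worked example of the two conversions above for pure red, Color.rgb(255, 0, 0),
     * with values rounded: the red channel gamma-expands to 100 while green and blue stay 0,
     * giving XYZ = (41.24, 21.26, 1.93); normalizing by the D65 reference white and taking
     * cube roots then yields approximately L* = 53.2, a* = 80.1, b* = 67.2.
     *
     *     double[] lab = convertXyzToCieLab(convertRgbToXyz(Color.rgb(255, 0, 0)));
     *     // lab[0] ~ 53.2, lab[1] ~ 80.1, lab[2] ~ 67.2
     */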
    /**
     * Crops the border of the array representing an image by hBorderSize
     * pixels on the left and right borders, and by vBorderSize pixels on the
     * top and bottom borders (so the width is 2 * hBorderSize smaller and
     * the height is 2 * vBorderSize smaller), then scales the image up to
     * match the original size using bilinear interpolation.
     */
    private static Bitmap shrinkAndScaleBilinear(
            Bitmap input, double hBorderSize, double vBorderSize) {

        int width = input.getWidth();
        int height = input.getHeight();

        // Compute the proper step sizes.
        double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
        double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);

        // Read the input bitmap into RGB arrays.
        int[] inputPixels = new int[width * height];
        input.getPixels(inputPixels, 0, width, 0, 0, width, height);
        int[][] inputRgb = new int[width * height][3];
        for (int i = 0; i < width * height; ++i) {
            inputRgb[i][0] = Color.red(inputPixels[i]);
            inputRgb[i][1] = Color.green(inputPixels[i]);
            inputRgb[i][2] = Color.blue(inputPixels[i]);
        }
        inputPixels = null;

        // Prepare the output buffer.
        int[] outputPixels = new int[width * height];

        // Start the iteration. The first y coordinate is vBorderSize.
        double y = vBorderSize;
        for (int yIndex = 0; yIndex < height; ++yIndex) {
            // The first x coordinate is hBorderSize.
            double x = hBorderSize;
            for (int xIndex = 0; xIndex < width; ++xIndex) {
                // Determine the square of interest.
                int left = (int) x;  // This is floor(x).
                int top = (int) y;   // This is floor(y).
                int right = left + 1;
                int bottom = top + 1;

                // (u, v) is the fractional part of (x, y).
                double u = x - (double) left;
                double v = y - (double) top;

                // Precompute the four bilinear weights to save time.
                double p00 = (1.0 - u) * (1.0 - v);
                double p01 = (1.0 - u) * v;
                double p10 = u * (1.0 - v);
                double p11 = u * v;

                // Clamp the indices to prevent out-of-bound access that may be
                // caused by round-off error.
                if (left >= width) left = width - 1;
                if (top >= height) top = height - 1;
                if (right >= width) right = width - 1;
                if (bottom >= height) bottom = height - 1;

                // Sample RGB values from the four corners.
                int[] rgb00 = inputRgb[top * width + left];
                int[] rgb01 = inputRgb[bottom * width + left];
                int[] rgb10 = inputRgb[top * width + right];
                int[] rgb11 = inputRgb[bottom * width + right];

                // Interpolate each component of RGB separately.
                int[] mixedColor = new int[3];
                for (int k = 0; k < 3; ++k) {
                    mixedColor[k] = (int) Math.round(
                            p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
                            + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
                }
                // Convert RGB to the bitmap Color format and store.
                outputPixels[yIndex * width + xIndex] = Color.rgb(
                        mixedColor[0], mixedColor[1], mixedColor[2]);
                x += xInc;
            }
            y += yInc;
        }
        // Assemble the output buffer into a Bitmap object.
        return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
    }
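    /*
     * Sketch (hypothetical "frame" bitmap): cropping half a pixel from each border of a
     * frame and rescaling it back to full size. As a sanity check on the weights above,
     * at fractional offsets u = 0.25, v = 0.5 they are p00 = 0.375, p01 = 0.375,
     * p10 = 0.125, p11 = 0.125; the four weights always sum to 1.0, so flat-colored
     * regions pass through unchanged.
     *
     *     Bitmap cropped = shrinkAndScaleBilinear(frame, 0.5, 0.5);
     */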
    /**
     * Calls computeDifference on multiple cropped-and-scaled versions of bitmap2.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops) {

        // Compute the difference with the original image (bitmap2) first.
        Difference minDiff = computeDifference(bitmap1, bitmap2);
        // Then go through the list of borderCrops.
        for (Pair<Double, Double> borderCrop : borderCrops) {
            // Compute the difference between bitmap1 and a transformed
            // version of bitmap2.
            Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
            Difference d = computeDifference(bitmap1, bitmap2s);
            // Keep the minimum difference.
            if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
                minDiff = d;
                minDiff.bestMatchBorderCrop = borderCrop;
            }
        }
        return minDiff;
    }

    /**
     * Calls computeMinimumDifference on a default list of border crops.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight) {

        double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
        double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
        double hBorderH = 0.5 * hBorder; // Half-texel horizontal border.
        double vBorderH = 0.5 * vBorder; // Half-texel vertical border.
        // This default list of border crops comes from the behavior of
        // GLConsumer.computeTransformMatrix().
        return computeMinimumDifference(
                bitmap1,
                bitmap2,
                new Pair[] {
                    Pair.create(hBorderH, 0.0),
                    Pair.create(hBorderH, vBorderH),
                    Pair.create(0.0, vBorderH),
                    Pair.create(hBorder, 0.0),
                    Pair.create(hBorder, vBorder),
                    Pair.create(0.0, vBorder)
                });
    }

    /* Describes the difference between two {@link Bitmap} instances. */
    public static final class Difference {

        public final int greatestPixelDifference;
        public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
        public Pair<Double, Double> bestMatchBorderCrop;

        private Difference(int greatestPixelDifference) {
            this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
            this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
                    Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
                Pair<Double, Double> bestMatchBorderCrop) {
            this.greatestPixelDifference = greatestPixelDifference;
            this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
            this.bestMatchBorderCrop = bestMatchBorderCrop;
        }
    }

}

/* Wrapper for MIME types. */
final class MimeTypes {

    private MimeTypes() {}

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    public static boolean isVideo(String mimeType) {
        return mimeType.startsWith("video");
    }

}
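/*
 * End-to-end sketch (illustrative only; "snapshot" and "golden" are hypothetical bitmaps of
 * the same size, and ALLOWED_GREATEST_PIXEL_DIFFERENCE is a hypothetical threshold): a
 * decode-accuracy check compares a decoded frame against a golden image while tolerating
 * the sub-texel crops introduced by GLConsumer.computeTransformMatrix().
 *
 *     BitmapCompare.Difference diff = BitmapCompare.computeMinimumDifference(
 *             snapshot, golden, videoWidth, videoHeight);
 *     assertTrue("Pixel difference too large: " + diff.greatestPixelDifference,
 *             diff.greatestPixelDifference <= ALLOWED_GREATEST_PIXEL_DIFFERENCE);
 */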