1 /* 2 * Copyright (C) 2016 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 package android.media.cts; 17 18 import android.media.cts.R; 19 20 import android.annotation.TargetApi; 21 import android.app.Activity; 22 import android.content.Context; 23 import android.content.pm.ActivityInfo; 24 import android.content.res.Configuration; 25 import android.content.res.Resources; 26 import android.graphics.Bitmap; 27 import android.graphics.Bitmap.Config; 28 import android.graphics.BitmapFactory; 29 import android.graphics.Color; 30 import android.graphics.SurfaceTexture; 31 import android.media.MediaCodec; 32 import android.media.MediaCodec.BufferInfo; 33 import android.media.MediaCodec.CodecException; 34 import android.media.MediaExtractor; 35 import android.media.MediaFormat; 36 import android.net.Uri; 37 import android.opengl.EGL14; 38 import android.opengl.GLES11Ext; 39 import android.opengl.GLES20; 40 import android.opengl.GLSurfaceView; 41 import android.os.Build; 42 import android.os.Handler; 43 import android.os.HandlerThread; 44 import android.os.Looper; 45 import android.os.SystemClock; 46 import android.test.ActivityInstrumentationTestCase2; 47 import android.util.Log; 48 import android.util.Pair; 49 import android.util.SparseArray; 50 import android.view.PixelCopy; 51 import android.view.PixelCopy.OnPixelCopyFinishedListener; 52 import android.view.Surface; 53 import android.view.SurfaceHolder; 54 import 
android.view.SurfaceView; 55 import android.view.TextureView; 56 import android.view.View; 57 import android.view.ViewGroup; 58 import android.widget.RelativeLayout; 59 60 import java.io.File; 61 import java.io.FileOutputStream; 62 import java.io.InputStream; 63 import java.io.IOException; 64 import java.nio.ByteBuffer; 65 import java.nio.ByteOrder; 66 import java.nio.FloatBuffer; 67 import java.util.concurrent.TimeUnit; 68 69 import javax.microedition.khronos.egl.EGL10; 70 import javax.microedition.khronos.egl.EGLConfig; 71 import javax.microedition.khronos.egl.EGLContext; 72 import javax.microedition.khronos.egl.EGLDisplay; 73 import javax.microedition.khronos.egl.EGLSurface; 74 75 @TargetApi(16) 76 public class DecodeAccuracyTestBase 77 extends ActivityInstrumentationTestCase2<DecodeAccuracyTestActivity> { 78 79 protected Context mContext; 80 protected Resources mResources; 81 protected DecodeAccuracyTestActivity mActivity; 82 protected TestHelper testHelper; 83 DecodeAccuracyTestBase()84 public DecodeAccuracyTestBase() { 85 super(DecodeAccuracyTestActivity.class); 86 } 87 88 @Override setUp()89 protected void setUp() throws Exception { 90 super.setUp(); 91 mActivity = getActivity(); 92 getInstrumentation().waitForIdleSync(); 93 mContext = getInstrumentation().getTargetContext(); 94 mResources = mContext.getResources(); 95 testHelper = new TestHelper(mContext, mActivity); 96 } 97 98 @Override tearDown()99 protected void tearDown() throws Exception { 100 mActivity = null; 101 super.tearDown(); 102 } 103 getHelper()104 protected TestHelper getHelper() { 105 return testHelper; 106 } 107 checkNotNull(T reference)108 public static <T> T checkNotNull(T reference) { 109 assertNotNull(reference); 110 return reference; 111 } 112 113 public static class SimplePlayer { 114 115 public static final long DECODE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(1) / 2; 116 117 private static final int NO_TRACK_INDEX = -3; 118 private static final long DEQUEUE_TIMEOUT_US = 20; 119 private 
static final String TAG = SimplePlayer.class.getSimpleName(); 120 121 private final Context context; 122 private final MediaExtractor extractor; 123 private MediaCodec decoder; 124 SimplePlayer(Context context)125 public SimplePlayer(Context context) { 126 this(context, new MediaExtractor()); 127 } 128 SimplePlayer(Context context, MediaExtractor extractor)129 public SimplePlayer(Context context, MediaExtractor extractor) { 130 this.context = checkNotNull(context); 131 this.extractor = checkNotNull(extractor); 132 } 133 134 /* 135 * The function play the corresponding file for certain number of frames, 136 * 137 * @param surface is the surface view of decoder output. 138 * @param videoFormat is the format of the video to extract and decode. 139 * @param numOfTotalFrame is the number of Frame wish to play. 140 * @return a PlayerResult object indicating success or failure. 141 */ decodeVideoFrames( Surface surface, VideoFormat videoFormat, int numOfTotalFrames)142 public PlayerResult decodeVideoFrames( 143 Surface surface, VideoFormat videoFormat, int numOfTotalFrames) { 144 PlayerResult playerResult; 145 if (prepare(surface, videoFormat)) { 146 if (startDecoder()) { 147 playerResult = decodeFramesAndDisplay( 148 surface, numOfTotalFrames, numOfTotalFrames * DECODE_TIMEOUT_MS); 149 } else { 150 playerResult = PlayerResult.failToStart(); 151 } 152 } else { 153 playerResult = new PlayerResult(); 154 } 155 release(); 156 return new PlayerResult(playerResult); 157 } 158 decodeVideoFrames(VideoFormat videoFormat, int numOfTotalFrames)159 public PlayerResult decodeVideoFrames(VideoFormat videoFormat, int numOfTotalFrames) { 160 return decodeVideoFrames(null, videoFormat, numOfTotalFrames); 161 } 162 163 /* 164 * The function set up the extractor and decoder with proper format. 165 * This must be called before decodeFramesAndDisplay. 
166 */ prepare(Surface surface, VideoFormat videoFormat)167 private boolean prepare(Surface surface, VideoFormat videoFormat) { 168 if (!setExtractorDataSource(videoFormat)) { 169 return false; 170 } 171 int trackNum = getFirstVideoTrackIndex(extractor); 172 if (trackNum == NO_TRACK_INDEX) { 173 return false; 174 } 175 extractor.selectTrack(trackNum); 176 MediaFormat mediaFormat = extractor.getTrackFormat(trackNum); 177 configureFormat(mediaFormat, videoFormat); 178 return configureDecoder(surface, mediaFormat); 179 } 180 181 /* The function decode video frames and display in a surface. */ decodeFramesAndDisplay( Surface surface, int numOfTotalFrames, long timeOutMs)182 private PlayerResult decodeFramesAndDisplay( 183 Surface surface, int numOfTotalFrames, long timeOutMs) { 184 checkNotNull(decoder); 185 int numOfDecodedFrames = 0; 186 long decodeStart = 0; 187 boolean renderToSurface = surface != null ? true : false; 188 BufferInfo info = new BufferInfo(); 189 ByteBuffer inputBuffer; 190 ByteBuffer[] inputBufferArray = decoder.getInputBuffers(); 191 long loopStart = SystemClock.elapsedRealtime(); 192 193 while (numOfDecodedFrames < numOfTotalFrames 194 && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) { 195 try { 196 int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US); 197 if (inputBufferIndex >= 0) { 198 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { 199 inputBuffer = inputBufferArray[inputBufferIndex]; 200 } else { 201 inputBuffer = decoder.getInputBuffer(inputBufferIndex); 202 } 203 if (decodeStart == 0) { 204 decodeStart = SystemClock.elapsedRealtime(); 205 } 206 int sampleSize = extractor.readSampleData(inputBuffer, 0); 207 if (sampleSize > 0) { 208 decoder.queueInputBuffer( 209 inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0); 210 extractor.advance(); 211 } 212 } 213 int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US); 214 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 
0) { 215 break; 216 } 217 if (decoderStatus >= 0 && info.size > 0) { 218 decoder.releaseOutputBuffer(decoderStatus, renderToSurface); 219 numOfDecodedFrames++; 220 } 221 } catch (IllegalStateException exception) { 222 Log.e(TAG, "IllegalStateException in decodeFramesAndDisplay " + exception); 223 break; 224 } 225 } 226 long totalTime = SystemClock.elapsedRealtime() - decodeStart; 227 return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime); 228 } 229 release()230 private void release() { 231 decoderRelease(); 232 extractorRelease(); 233 } 234 setExtractorDataSource(VideoFormat videoFormat)235 private boolean setExtractorDataSource(VideoFormat videoFormat) { 236 try { 237 extractor.setDataSource(context, videoFormat.loadUri(context), null); 238 } catch (IOException exception) { 239 Log.e(TAG, "IOException in setDataSource", exception); 240 return false; 241 } 242 return true; 243 } 244 configureDecoder(Surface surface, MediaFormat mediaFormat)245 private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) { 246 try { 247 decoder = MediaCodec.createDecoderByType( 248 mediaFormat.getString(MediaFormat.KEY_MIME)); 249 decoder.configure(mediaFormat, surface, null, 0); 250 } catch (Exception exception) { 251 if (exception instanceof IOException) { 252 Log.e(TAG, "IOException in createDecoderByType", exception); 253 } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP 254 && exception instanceof CodecException) { 255 Log.e(TAG, "CodecException in createDecoderByType", exception); 256 decoder.reset(); 257 } else { 258 Log.e(TAG, "Unknown exception in createDecoderByType", exception); 259 } 260 decoderRelease(); 261 return false; 262 } 263 return true; 264 } 265 startDecoder()266 private boolean startDecoder() { 267 try { 268 decoder.start(); 269 } catch (Exception exception) { 270 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP 271 && exception instanceof CodecException) { 272 Log.e(TAG, "CodecException in 
startDecoder", exception); 273 decoder.reset(); 274 } else if (exception instanceof IllegalStateException) { 275 Log.e(TAG, "IllegalStateException in startDecoder", exception); 276 } else { 277 Log.e(TAG, "Unknown exception in startDecoder", exception); 278 } 279 decoderRelease(); 280 return false; 281 } 282 return true; 283 } 284 decoderRelease()285 private void decoderRelease() { 286 if (decoder == null) { 287 return; 288 } 289 try { 290 decoder.stop(); 291 } catch (IllegalStateException exception) { 292 // IllegalStateException happens when decoder fail to start. 293 Log.e(TAG, "IllegalStateException in decoder stop" + exception); 294 } finally { 295 try { 296 decoder.release(); 297 } catch (IllegalStateException exception) { 298 Log.e(TAG, "IllegalStateException in decoder release" + exception); 299 } 300 } 301 decoder = null; 302 } 303 extractorRelease()304 private void extractorRelease() { 305 if (extractor == null) { 306 return; 307 } 308 try { 309 extractor.release(); 310 } catch (IllegalStateException exception) { 311 Log.e(TAG, "IllegalStateException in extractor release" + exception); 312 } 313 } 314 configureFormat(MediaFormat mediaFormat, VideoFormat videoFormat)315 private static void configureFormat(MediaFormat mediaFormat, VideoFormat videoFormat) { 316 checkNotNull(mediaFormat); 317 checkNotNull(videoFormat); 318 videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME)); 319 videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH)); 320 videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)); 321 mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth()); 322 mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight()); 323 324 if (videoFormat.getMaxWidth() != VideoFormat.UNSET 325 && videoFormat.getMaxHeight() != VideoFormat.UNSET) { 326 mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getMaxWidth()); 327 mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, 
videoFormat.getMaxHeight()); 328 } 329 } 330 331 /* 332 * The function returns the first video track found. 333 * 334 * @param extractor is the media extractor instantiated with a video uri. 335 * @return the index of the first video track if found, NO_TRACK_INDEX otherwise. 336 */ getFirstVideoTrackIndex(MediaExtractor extractor)337 private static int getFirstVideoTrackIndex(MediaExtractor extractor) { 338 for (int i = 0; i < extractor.getTrackCount(); i++) { 339 MediaFormat trackMediaFormat = extractor.getTrackFormat(i); 340 if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith("video/")) { 341 return i; 342 } 343 } 344 Log.e(TAG, "couldn't get a video track"); 345 return NO_TRACK_INDEX; 346 } 347 348 /* Stores the result from SimplePlayer. */ 349 public static final class PlayerResult { 350 351 public static final int UNSET = -1; 352 private final boolean configureSuccess; 353 private final boolean startSuccess; 354 private final boolean decodeSuccess; 355 private final long totalTime; 356 PlayerResult( boolean configureSuccess, boolean startSuccess, boolean decodeSuccess, long totalTime)357 public PlayerResult( 358 boolean configureSuccess, boolean startSuccess, 359 boolean decodeSuccess, long totalTime) { 360 this.configureSuccess = configureSuccess; 361 this.startSuccess = startSuccess; 362 this.decodeSuccess = decodeSuccess; 363 this.totalTime = totalTime; 364 } 365 PlayerResult(PlayerResult playerResult)366 public PlayerResult(PlayerResult playerResult) { 367 this(playerResult.configureSuccess, playerResult.startSuccess, 368 playerResult.decodeSuccess, playerResult.totalTime); 369 } 370 PlayerResult()371 public PlayerResult() { 372 // Dummy PlayerResult. 
373 this(false, false, false, UNSET); 374 } 375 failToStart()376 public static PlayerResult failToStart() { 377 return new PlayerResult(true, false, false, UNSET); 378 } 379 isConfigureSuccess()380 public boolean isConfigureSuccess() { 381 return configureSuccess; 382 } 383 isStartSuccess()384 public boolean isStartSuccess() { 385 return startSuccess; 386 } 387 isDecodeSuccess()388 public boolean isDecodeSuccess() { 389 return decodeSuccess; 390 } 391 isSuccess()392 public boolean isSuccess() { 393 return isConfigureSuccess() && isStartSuccess() 394 && isDecodeSuccess() && getTotalTime() != UNSET; 395 } 396 getTotalTime()397 public long getTotalTime() { 398 return totalTime; 399 } 400 isFailureForAll()401 public boolean isFailureForAll() { 402 return (!isConfigureSuccess() && !isStartSuccess() 403 && !isDecodeSuccess() && getTotalTime() == UNSET); 404 } 405 } 406 407 } 408 409 /* Utility class for collecting common test case functionality. */ 410 class TestHelper { 411 412 private final Context context; 413 private final Handler handler; 414 private final Activity activity; 415 TestHelper(Context context, Activity activity)416 public TestHelper(Context context, Activity activity) { 417 this.context = checkNotNull(context); 418 this.handler = new Handler(Looper.getMainLooper()); 419 this.activity = activity; 420 } 421 generateBitmapFromImageResourceId(int resourceId)422 public Bitmap generateBitmapFromImageResourceId(int resourceId) { 423 return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId)); 424 } 425 getContext()426 public Context getContext() { 427 return context; 428 } 429 rotateOrientation()430 public void rotateOrientation() { 431 handler.post(new Runnable() { 432 @Override 433 public void run() { 434 final int orientation = context.getResources().getConfiguration().orientation; 435 if (orientation == Configuration.ORIENTATION_PORTRAIT) { 436 activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); 437 } else 
{ 438 activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); 439 } 440 } 441 }); 442 } 443 unsetOrientation()444 public void unsetOrientation() { 445 handler.post(new Runnable() { 446 @Override 447 public void run() { 448 activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED); 449 } 450 }); 451 } 452 generateView(View view)453 public void generateView(View view) { 454 RelativeLayout relativeLayout = 455 (RelativeLayout) activity.findViewById(R.id.attach_view); 456 ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view); 457 handler.post(viewGenerator); 458 } 459 cleanUpView(View view)460 public void cleanUpView(View view) { 461 ViewCleaner viewCleaner = new ViewCleaner(view); 462 handler.post(viewCleaner); 463 } 464 generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot)465 public synchronized Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) { 466 handler.post(snapshot); 467 try { 468 while (!snapshot.isBitmapReady()) { 469 Thread.sleep(100); 470 } 471 } catch (InterruptedException e) { 472 e.printStackTrace(); 473 } 474 return snapshot.getBitmap(); 475 } 476 477 private class ViewGenerator implements Runnable { 478 479 private final View view; 480 private final RelativeLayout relativeLayout; 481 ViewGenerator(RelativeLayout relativeLayout, View view)482 public ViewGenerator(RelativeLayout relativeLayout, View view) { 483 this.view = checkNotNull(view); 484 this.relativeLayout = checkNotNull(relativeLayout); 485 } 486 487 @Override run()488 public void run() { 489 if (view.getParent() != null) { 490 ((ViewGroup) view.getParent()).removeView(view); 491 } 492 RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams( 493 VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT); 494 view.setLayoutParams(params); 495 relativeLayout.addView(view); 496 } 497 498 } 499 500 private class ViewCleaner implements Runnable { 501 502 private final View view; 503 ViewCleaner(View 
view)504 public ViewCleaner(View view) { 505 this.view = checkNotNull(view); 506 } 507 508 @Override run()509 public void run() { 510 if (view.getParent() != null) { 511 ((ViewGroup) view.getParent()).removeView(view); 512 } 513 } 514 515 } 516 517 } 518 519 } 520 521 /* Factory for manipulating a {@link View}. */ 522 abstract class VideoViewFactory { 523 524 public final long VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(1); 525 public static final int VIEW_WIDTH = 480; 526 public static final int VIEW_HEIGHT = 360; 527 VideoViewFactory()528 public VideoViewFactory() {} 529 release()530 public abstract void release(); 531 getName()532 public abstract String getName(); 533 createView(Context context)534 public abstract View createView(Context context); 535 waitForViewIsAvailable()536 public abstract void waitForViewIsAvailable(); 537 getSurface()538 public abstract Surface getSurface(); 539 getVideoViewSnapshot()540 public abstract VideoViewSnapshot getVideoViewSnapshot(); 541 542 } 543 544 /* Factory for building a {@link TextureView}. 
 */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    // Guards the wait in waitForViewIsAvailable; notified from onSurfaceTextureAvailable.
    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    @Override
    public TextureView createView(Context context) {
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    // NOTE(review): allocates a new Surface per call; callers appear responsible for
    // releasing it — confirm against call sites.
    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    /* Polls isAvailable(), sleeping up to VIEW_AVAILABLE_TIMEOUT_MS per wait; the
     * onSurfaceTextureAvailable callback wakes the wait early. */
    @Override
    public void waitForViewIsAvailable() {
        while (!textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_AVAILABLE_TIMEOUT_MS);
                } catch (InterruptedException exception) {
                    Log.e(TAG, "Taking too long to attach a TextureView to a window.", exception);
                }
            }
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}

/**
 * Factory for building a {@link SurfaceView}
 */
@TargetApi(24)
class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {

    private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "SurfaceView";

    // Guards the wait in waitForViewIsAvailable; notified from surfaceCreated.
    private final Object syncToken = new Object();
    private SurfaceViewSnapshot surfaceViewSnapshot;
    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    public SurfaceViewFactory() {}

    @Override
    public void release() {
        if (surfaceViewSnapshot != null) {
            surfaceViewSnapshot.release();
        }
        surfaceView = null;
        surfaceHolder = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        // NOTE(review): Looper.prepare() here implies this is expected to run on a
        // thread without a Looper — confirm which thread calls createView.
        Looper.prepare();
        surfaceView = new SurfaceView(context);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        return surfaceView;
    }

    /* Polls Surface.isValid(), waiting up to VIEW_AVAILABLE_TIMEOUT_MS per iteration;
     * the surfaceCreated callback wakes the wait early. */
    @Override
    public void waitForViewIsAvailable() {
        while (!getSurface().isValid()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_AVAILABLE_TIMEOUT_MS);
                } catch (InterruptedException exception) {
                    Log.e(TAG, "Taking too long to attach a SurfaceView to a window.", exception);
                }
            }
        }
    }

    @Override
    public Surface getSurface() {
        return surfaceHolder.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        surfaceViewSnapshot = new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
        return surfaceViewSnapshot;
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}

}

/**
 * Factory for building EGL and GLES that could render to GLSurfaceView.
 * {@link GLSurfaceView} {@link EGL10} {@link GLES20}.
 */
@TargetApi(16)
class GLSurfaceViewFactory extends VideoViewFactory {

    private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "GLSurfaceView";

    // Guards the wait in waitForViewIsAvailable; notified from the GL thread's run().
    private final Object surfaceSyncToken = new Object();

    private GLSurfaceViewThread glSurfaceViewThread;
    private boolean byteBufferIsReady = false;

    public GLSurfaceViewFactory() {}

    @Override
    public void release() {
        glSurfaceViewThread.release();
        glSurfaceViewThread = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public View createView(Context context) {
        // Do all GL rendering in the GL thread.
        glSurfaceViewThread = new GLSurfaceViewThread();
        glSurfaceViewThread.start();
        // No necessary view to display, return null.
        return null;
    }

    /* Polls for the GL thread's Surface, waiting up to VIEW_AVAILABLE_TIMEOUT_MS per
     * iteration; the GL thread notifies surfaceSyncToken once the Surface exists. */
    @Override
    public void waitForViewIsAvailable() {
        while (glSurfaceViewThread.getSurface() == null) {
            synchronized (surfaceSyncToken) {
                try {
                    surfaceSyncToken.wait(VIEW_AVAILABLE_TIMEOUT_MS);
                } catch (InterruptedException exception) {
                    Log.e(TAG, "Taking too long for the surface to become available.", exception);
                }
            }
        }
    }

    @Override
    public Surface getSurface() {
        return glSurfaceViewThread.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
    }

    /* True once saveFrame() has finished copying the latest frame's pixels. */
    public boolean byteBufferIsReady() {
        return byteBufferIsReady;
    }

    public ByteBuffer getByteBuffer() {
        return glSurfaceViewThread.getByteBuffer();
    }

    /* Does all GL operations. */
    private class GLSurfaceViewThread extends Thread
            implements SurfaceTexture.OnFrameAvailableListener {

        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        private FloatBuffer triangleVertices;
        private float[] textureTransform = new float[16];

        // Full-screen quad as a triangle strip: interleaved position (x,y,z) + UV.
        private float[] triangleVerticesData = {
            // X, Y, Z, U, V
            -1f, -1f, 0f, 0f, 1f,
            1f, -1f, 0f, 1f, 1f,
            -1f, 1f, 0f, 0f, 0f,
            1f, 1f, 0f, 1f, 0f,
        };
        // Make the top-left corner corresponds to texture coordinate
        // (0, 0). This complies with the transformation matrix obtained from
        // SurfaceTexture.getTransformMatrix.

        private static final String VERTEX_SHADER =
                "attribute vec4 aPosition;\n"
                + "attribute vec4 aTextureCoord;\n"
                + "uniform mat4 uTextureTransform;\n"
                + "varying vec2 vTextureCoord;\n"
                + "void main() {\n"
                + " gl_Position = aPosition;\n"
                + " vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
                + "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n"
                + "precision mediump float;\n" // highp here doesn't seem to matter
                + "varying vec2 vTextureCoord;\n"
                + "uniform samplerExternalOES sTexture;\n"
                + "void main() {\n"
                + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                + "}\n";

        private int glProgram;
        private int textureID = -1;
        private int aPositionHandle;
        private int aTextureHandle;
        private int uTextureTransformHandle;
        private EGLDisplay eglDisplay = null;
        private EGLContext eglContext = null;
        private EGLSurface eglSurface = null;
        private EGL10 egl10;
        private Surface surface = null;
        private SurfaceTexture surfaceTexture;
        private ByteBuffer byteBuffer;

        public GLSurfaceViewThread() {}

        /* Thread entry: set up EGL/GLES, publish the Surface, then loop for frame
         * callbacks. Order matters: the Surface must exist before notify(). */
        @Override
        public void run() {
            Looper.prepare();
            triangleVertices = ByteBuffer
                    .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            triangleVertices.put(triangleVerticesData).position(0);

            eglSetup();
            makeCurrent();
            eglSurfaceCreated();

            surfaceTexture = new SurfaceTexture(getTextureId());
            surfaceTexture.setOnFrameAvailableListener(this);
            surface = new Surface(surfaceTexture);
            // Wake waitForViewIsAvailable(), which polls for a non-null Surface.
            synchronized (surfaceSyncToken) {
                surfaceSyncToken.notify();
            }
            // Store pixels from surface
            byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
            byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
            Looper.loop();
        }

        /* Runs on this thread's Looper: latch the new frame, then render and read back. */
        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            checkGlError("before updateTexImage");
            surfaceTexture.updateTexImage();
            st.getTransformMatrix(textureTransform);
            drawFrame();
            saveFrame();
        }

        /* Prepares EGL to use GLES 2.0 context and a surface that supports pbuffer. */
        public void eglSetup() {
            egl10 = (EGL10) EGLContext.getEGL();
            eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
            if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get egl10 display");
            }
            int[] version = new int[2];
            if (!egl10.eglInitialize(eglDisplay, version)) {
                eglDisplay = null;
                throw new RuntimeException("unable to initialize egl10");
            }
            // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
            int[] configAttribs = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            if (!egl10.eglChooseConfig(
                    eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
                throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
            }
            // Configure EGL context for OpenGL ES 2.0.
            int[] contextAttribs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
            };
            eglContext = egl10.eglCreateContext(
                    eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
            checkEglError("eglCreateContext");
            if (eglContext == null) {
                throw new RuntimeException("null context");
            }
            // Create a pbuffer surface.
            int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, VIEW_WIDTH,
                EGL10.EGL_HEIGHT, VIEW_HEIGHT,
                EGL10.EGL_NONE
            };
            eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
            checkEglError("eglCreatePbufferSurface");
            if (eglSurface == null) {
                throw new RuntimeException("surface was null");
            }
        }

        /* Tears down EGL state (surface, context, display) and the output Surface/texture. */
        public void release() {
            if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
                // Detach before destroying, per EGL teardown convention.
                egl10.eglMakeCurrent(eglDisplay,
                        EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
                egl10.eglDestroySurface(eglDisplay, eglSurface);
                egl10.eglDestroyContext(eglDisplay, eglContext);
                egl10.eglTerminate(eglDisplay);
            }
            eglDisplay = EGL10.EGL_NO_DISPLAY;
            eglContext = EGL10.EGL_NO_CONTEXT;
            eglSurface = EGL10.EGL_NO_SURFACE;
            surface.release();
            surfaceTexture.release();
        }

        /* Makes our EGL context and surface current. */
        public void makeCurrent() {
            if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
            checkEglError("eglMakeCurrent");
        }

        /* Call this after the EGL Surface is created and made current.
         * Compiles/links the shader program, looks up attribute/uniform locations, and
         * creates + configures the external OES texture the decoder renders into. */
        public void eglSurfaceCreated() {
            glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            if (glProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
            aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
            checkLocation(aPositionHandle, "aPosition");
            aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
            checkLocation(aTextureHandle, "aTextureCoord");
            uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
            checkLocation(uTextureTransformHandle, "uTextureTransform");

            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            checkGlError("glGenTextures");
            textureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            // Linear filtering + clamp-to-edge for the external (decoder output) texture.
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        /* Draws the latest latched frame as a full-viewport quad using the external texture. */
        public void drawFrame() {
            GLES20.glUseProgram(glProgram);
            checkGlError("glUseProgram");
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            checkGlError("glActiveTexture");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            // Position attribute: 3 floats at offset 0 of the interleaved buffer.
            triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aPositionHandle");
            GLES20.glEnableVertexAttribArray(aPositionHandle);
            checkGlError("glEnableVertexAttribArray aPositionHandle");

            // UV attribute: 2 floats at offset 3 of the interleaved buffer.
            triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aTextureHandle");
            GLES20.glEnableVertexAttribArray(aTextureHandle);
            checkGlError("glEnableVertexAttribArray aTextureHandle");

            // Apply the transform latched from SurfaceTexture.getTransformMatrix.
            GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false,
                    textureTransform, 0);
            checkGlError("glUniformMatrix uTextureTransformHandle");

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }

        /* Reads the pixels to a ByteBuffer. byteBufferIsReady is false while copying. */
        public void saveFrame() {
            byteBufferIsReady = false;
            byteBuffer.clear();
            GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
                    GLES20.GL_UNSIGNED_BYTE, byteBuffer);
            byteBufferIsReady = true;
        }

        public int getTextureId() {
            return textureID;
        }

        public Surface getSurface() {
            return surface;
        }

        public ByteBuffer getByteBuffer() {
            return byteBuffer;
        }

        /* Compiles a shader of the given type; returns 0 (after logging) on failure. */
        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);

            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
1025 } 1026 createProgram(String vertexSource, String fragmentSource)1027 private int createProgram(String vertexSource, String fragmentSource) { 1028 int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); 1029 if (vertexShader == 0) { 1030 return 0; 1031 } 1032 int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); 1033 if (pixelShader == 0) { 1034 return 0; 1035 } 1036 int program = GLES20.glCreateProgram(); 1037 if (program == 0) { 1038 Log.e(TAG, "Could not create program"); 1039 } 1040 GLES20.glAttachShader(program, vertexShader); 1041 checkGlError("glAttachShader"); 1042 GLES20.glAttachShader(program, pixelShader); 1043 checkGlError("glAttachShader"); 1044 GLES20.glLinkProgram(program); 1045 int[] linkStatus = new int[1]; 1046 GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); 1047 1048 if (linkStatus[0] != GLES20.GL_TRUE) { 1049 Log.e(TAG, "Could not link program: "); 1050 Log.e(TAG, GLES20.glGetProgramInfoLog(program)); 1051 GLES20.glDeleteProgram(program); 1052 program = 0; 1053 } 1054 return program; 1055 } 1056 checkEglError(String msg)1057 private void checkEglError(String msg) { 1058 int error; 1059 if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) { 1060 throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); 1061 } 1062 } 1063 checkGlError(String op)1064 public void checkGlError(String op) { 1065 int error; 1066 if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { 1067 Log.e(TAG, op + ": glError " + error); 1068 throw new RuntimeException(op + ": glError " + error); 1069 } 1070 } 1071 checkLocation(int location, String label)1072 public void checkLocation(int location, String label) { 1073 if (location < 0) { 1074 throw new RuntimeException("Unable to locate '" + label + "' in program"); 1075 } 1076 } 1077 } 1078 1079 } 1080 1081 /* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. 
*/ 1082 abstract class VideoViewSnapshot implements Runnable { 1083 getBitmap()1084 public abstract Bitmap getBitmap(); 1085 isBitmapReady()1086 public abstract boolean isBitmapReady(); 1087 1088 } 1089 1090 /* Runnable to get a bitmap from a texture view on the UI thread via a handler. */ 1091 class TextureViewSnapshot extends VideoViewSnapshot { 1092 1093 private final TextureView tv; 1094 private Bitmap bitmap = null; 1095 TextureViewSnapshot(TextureView tv)1096 public TextureViewSnapshot(TextureView tv) { 1097 this.tv = DecodeAccuracyTestBase.checkNotNull(tv); 1098 } 1099 1100 @Override run()1101 public synchronized void run() { 1102 bitmap = tv.getBitmap(); 1103 } 1104 1105 @Override getBitmap()1106 public Bitmap getBitmap() { 1107 return bitmap; 1108 } 1109 1110 @Override isBitmapReady()1111 public boolean isBitmapReady() { 1112 return bitmap != null; 1113 } 1114 1115 } 1116 1117 /** 1118 * Method to get bitmap of a {@link SurfaceView}. 1119 */ 1120 class SurfaceViewSnapshot extends VideoViewSnapshot { 1121 1122 private static final String TAG = SurfaceViewSnapshot.class.getSimpleName(); 1123 private static final int PIXELCOPY_REQUEST_SLEEP_MS = 30; 1124 private static final int PIXELCOPY_REQUEST_MAX_ATTEMPTS = 20; 1125 private static final int PIXELCOPY_TIMEOUT_MS = 1000; 1126 1127 private final Thread copyThread; 1128 private Bitmap bitmap; 1129 private int copyResult; 1130 SurfaceViewSnapshot(final SurfaceView surfaceView, final int width, final int height)1131 public SurfaceViewSnapshot(final SurfaceView surfaceView, final int width, final int height) { 1132 this.copyResult = -1; 1133 this.copyThread = new Thread(new Runnable() { 1134 @Override 1135 public void run() { 1136 SynchronousPixelCopy copyHelper = new SynchronousPixelCopy(); 1137 bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888); 1138 try { 1139 // Wait for SurfaceView to be available. 
1140 for (int i = 0; i < PIXELCOPY_REQUEST_MAX_ATTEMPTS; i++) { 1141 copyResult = copyHelper.request(surfaceView, bitmap); 1142 if (copyResult == PixelCopy.SUCCESS) { 1143 break; 1144 } 1145 Thread.sleep(PIXELCOPY_REQUEST_SLEEP_MS); 1146 } 1147 } catch (InterruptedException ex) { 1148 Log.w(TAG, "Pixel Copy is stopped/interrupted before it finishes", ex); 1149 } 1150 copyHelper.release(); 1151 } 1152 }); 1153 copyThread.start(); 1154 } 1155 1156 @Override run()1157 public synchronized void run() {} 1158 1159 @Override getBitmap()1160 public Bitmap getBitmap() { 1161 return bitmap; 1162 } 1163 1164 @Override isBitmapReady()1165 public boolean isBitmapReady() { 1166 return copyResult == PixelCopy.SUCCESS; 1167 } 1168 release()1169 public void release() { 1170 if (copyThread.isAlive()) { 1171 copyThread.interrupt(); 1172 } 1173 } 1174 1175 private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener { 1176 1177 private final Handler handler; 1178 private final HandlerThread thread; 1179 1180 private int status = -1; 1181 SynchronousPixelCopy()1182 public SynchronousPixelCopy() { 1183 this.thread = new HandlerThread("PixelCopyHelper"); 1184 thread.start(); 1185 this.handler = new Handler(thread.getLooper()); 1186 } 1187 release()1188 public void release() { 1189 thread.quit(); 1190 } 1191 request(SurfaceView source, Bitmap dest)1192 public int request(SurfaceView source, Bitmap dest) { 1193 synchronized (this) { 1194 PixelCopy.request(source, dest, this, handler); 1195 return getResultLocked(); 1196 } 1197 } 1198 getResultLocked()1199 private int getResultLocked() { 1200 try { 1201 this.wait(PIXELCOPY_TIMEOUT_MS); 1202 } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ } 1203 return status; 1204 } 1205 1206 @Override onPixelCopyFinished(int copyResult)1207 public void onPixelCopyFinished(int copyResult) { 1208 synchronized (this) { 1209 status = copyResult; 1210 this.notify(); 1211 } 1212 } 1213 1214 } 1215 1216 } 
1217 1218 /** 1219 * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler. 1220 * Note, because of how the bitmap is captured in GLSurfaceView, 1221 * this method does not have to be a runnable. 1222 */ 1223 class GLSurfaceViewSnapshot extends VideoViewSnapshot { 1224 1225 private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName(); 1226 private static final int GET_BYTEBUFFER_SLEEP_MS = 30; 1227 private static final int GET_BYTEBUFFER_MAX_ATTEMPTS = 20; 1228 1229 private final GLSurfaceViewFactory glSurfaceViewFactory; 1230 private final int width; 1231 private final int height; 1232 1233 private Bitmap bitmap = null; 1234 private boolean bitmapIsReady = false; 1235 GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height)1236 public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) { 1237 this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory); 1238 this.width = width; 1239 this.height = height; 1240 } 1241 1242 @Override run()1243 public synchronized void run() { 1244 try { 1245 waitForByteBuffer(); 1246 } catch (InterruptedException exception) { 1247 Log.w(TAG, exception.getMessage()); 1248 Log.w(TAG, "ByteBuffer may contain incorrect pixels."); 1249 } 1250 // Get ByteBuffer anyway. Let the test fail if ByteBuffer contains incorrect pixels. 1251 ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer(); 1252 bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 1253 byteBuffer.rewind(); 1254 bitmap.copyPixelsFromBuffer(byteBuffer); 1255 bitmapIsReady = true; 1256 } 1257 1258 @Override getBitmap()1259 public Bitmap getBitmap() { 1260 return bitmap; 1261 } 1262 1263 @Override isBitmapReady()1264 public boolean isBitmapReady() { 1265 return bitmapIsReady; 1266 } 1267 waitForByteBuffer()1268 public void waitForByteBuffer() throws InterruptedException { 1269 // Wait for byte buffer to be ready. 
1270 for (int i = 0; i < GET_BYTEBUFFER_MAX_ATTEMPTS; i++) { 1271 if (glSurfaceViewFactory.byteBufferIsReady()) { 1272 return; 1273 } 1274 Thread.sleep(GET_BYTEBUFFER_SLEEP_MS); 1275 } 1276 throw new InterruptedException("Taking too long to read pixels into a ByteBuffer."); 1277 } 1278 1279 } 1280 1281 /* Stores information of a video. */ 1282 class VideoFormat { 1283 1284 public static final int UNSET = -1; 1285 public static final String MIMETYPE_UNSET = "UNSET"; 1286 public static final String MIMETYPE_KEY = "mimeType"; 1287 public static final String WIDTH_KEY = "width"; 1288 public static final String HEIGHT_KEY = "height"; 1289 public static final String FRAMERATE_KEY = "frameRate"; 1290 1291 private final String filename; 1292 private Uri uri; 1293 private String mimeType = MIMETYPE_UNSET; 1294 private int width = UNSET; 1295 private int height = UNSET; 1296 private int maxWidth = UNSET; 1297 private int maxHeight = UNSET; 1298 private int originalWidth = UNSET; 1299 private int originalHeight = UNSET; 1300 VideoFormat(String filename, Uri uri)1301 public VideoFormat(String filename, Uri uri) { 1302 this.filename = filename; 1303 this.uri = uri; 1304 } 1305 VideoFormat(String filename)1306 public VideoFormat(String filename) { 1307 this(filename, null); 1308 } 1309 VideoFormat(VideoFormat videoFormat)1310 public VideoFormat(VideoFormat videoFormat) { 1311 this(videoFormat.filename, videoFormat.uri); 1312 } 1313 loadUri(Context context)1314 public Uri loadUri(Context context) { 1315 uri = createCacheFile(context); 1316 return uri; 1317 } 1318 getUri()1319 public Uri getUri() { 1320 return uri; 1321 } 1322 getFilename()1323 public String getFilename() { 1324 return filename; 1325 } 1326 setMimeType(String mimeType)1327 public void setMimeType(String mimeType) { 1328 this.mimeType = mimeType; 1329 } 1330 getMimeType()1331 public String getMimeType() { 1332 return mimeType; 1333 } 1334 setWidth(int width)1335 public void setWidth(int width) { 1336 this.width = 
width; 1337 if (this.originalWidth == UNSET) { 1338 this.originalWidth = width; 1339 } 1340 } 1341 setMaxWidth(int maxWidth)1342 public void setMaxWidth(int maxWidth) { 1343 this.maxWidth = maxWidth; 1344 } 1345 getWidth()1346 public int getWidth() { 1347 return width; 1348 } 1349 getMaxWidth()1350 public int getMaxWidth() { 1351 return maxWidth; 1352 } 1353 getOriginalWidth()1354 public int getOriginalWidth() { 1355 return originalWidth; 1356 } 1357 setHeight(int height)1358 public void setHeight(int height) { 1359 this.height = height; 1360 if (this.originalHeight == UNSET) { 1361 this.originalHeight = height; 1362 } 1363 } 1364 setMaxHeight(int maxHeight)1365 public void setMaxHeight(int maxHeight) { 1366 this.maxHeight = maxHeight; 1367 } 1368 getHeight()1369 public int getHeight() { 1370 return height; 1371 } 1372 getMaxHeight()1373 public int getMaxHeight() { 1374 return maxHeight; 1375 } 1376 getOriginalHeight()1377 public int getOriginalHeight() { 1378 return originalHeight; 1379 } 1380 createCacheFile(Context context)1381 private Uri createCacheFile(Context context) { 1382 try { 1383 File cacheFile = new File(context.getCacheDir(), filename); 1384 if (cacheFile.createNewFile() == false) { 1385 cacheFile.delete(); 1386 cacheFile.createNewFile(); 1387 } 1388 InputStream inputStream = context.getAssets().open(filename); 1389 FileOutputStream fileOutputStream = new FileOutputStream(cacheFile); 1390 final int bufferSize = 1024 * 512; 1391 byte[] buffer = new byte[bufferSize]; 1392 1393 while (inputStream.read(buffer) != -1) { 1394 fileOutputStream.write(buffer, 0, bufferSize); 1395 } 1396 fileOutputStream.close(); 1397 inputStream.close(); 1398 return Uri.fromFile(cacheFile); 1399 } catch (IOException e) { 1400 e.printStackTrace(); 1401 return null; 1402 } 1403 } 1404 1405 } 1406 1407 /** 1408 * Compares bitmaps to determine if they are similar. 
1409 * 1410 * <p>To determine greatest pixel difference we transform each pixel into the 1411 * CIE L*a*b* color space. The euclidean distance formula is used to determine pixel differences. 1412 */ 1413 class BitmapCompare { 1414 1415 private static final int RED = 0; 1416 private static final int GREEN = 1; 1417 private static final int BLUE = 2; 1418 private static final int X = 0; 1419 private static final int Y = 1; 1420 private static final int Z = 2; 1421 1422 private static SparseArray<double[]> pixelTransformCache = new SparseArray<>(); 1423 BitmapCompare()1424 private BitmapCompare() {} 1425 1426 /** 1427 * Produces greatest pixel between two bitmaps. Used to determine bitmap similarity. 1428 * 1429 * @param bitmap1 A bitmap to compare to bitmap2. 1430 * @param bitmap2 A bitmap to compare to bitmap1. 1431 * @return A {@link Difference} with an integer describing the greatest pixel difference, 1432 * using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional 1433 * {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate 1434 * where it was first found. 1435 */ 1436 @TargetApi(12) computeDifference(Bitmap bitmap1, Bitmap bitmap2)1437 public static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) { 1438 if ((bitmap1 == null || bitmap2 == null) && bitmap1 != bitmap2) { 1439 return new Difference(Integer.MAX_VALUE); 1440 } 1441 if (bitmap1 == bitmap2 || bitmap1.sameAs(bitmap2)) { 1442 return new Difference(0); 1443 } 1444 if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) { 1445 return new Difference(Integer.MAX_VALUE); 1446 } 1447 // Convert all pixels to CIE L*a*b* color space so we can do a direct color comparison using 1448 // euclidean distance formula. 
1449 final double[][] pixels1 = convertRgbToCieLab(bitmap1); 1450 final double[][] pixels2 = convertRgbToCieLab(bitmap2); 1451 int greatestDifference = 0; 1452 int greatestDifferenceIndex = -1; 1453 for (int i = 0; i < pixels1.length; i++) { 1454 final int difference = euclideanDistance(pixels1[i], pixels2[i]); 1455 if (difference > greatestDifference) { 1456 greatestDifference = difference; 1457 greatestDifferenceIndex = i; 1458 } 1459 } 1460 return new Difference(greatestDifference, Pair.create( 1461 greatestDifferenceIndex % bitmap1.getWidth(), 1462 greatestDifferenceIndex / bitmap1.getHeight())); 1463 } 1464 convertRgbToCieLab(Bitmap bitmap)1465 private static double[][] convertRgbToCieLab(Bitmap bitmap) { 1466 final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3]; 1467 final int pixels[] = new int[bitmap.getHeight() * bitmap.getWidth()]; 1468 bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight()); 1469 for (int i = 0; i < pixels.length; i++) { 1470 final double[] transformedColor = pixelTransformCache.get(pixels[i]); 1471 if (transformedColor != null) { 1472 result[i] = transformedColor; 1473 } else { 1474 result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i])); 1475 pixelTransformCache.put(pixels[i], result[i]); 1476 } 1477 } 1478 return result; 1479 } 1480 1481 /** 1482 * Conversion from RGB to XYZ based algorithm as defined by: 1483 * http://www.easyrgb.com/index.php?X=MATH&H=02#text2 1484 * 1485 * <p><pre>{@code 1486 * var_R = ( R / 255 ) //R from 0 to 255 1487 * var_G = ( G / 255 ) //G from 0 to 255 1488 * var_B = ( B / 255 ) //B from 0 to 255 1489 * 1490 * if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4 1491 * else var_R = var_R / 12.92 1492 * if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4 1493 * else var_G = var_G / 12.92 1494 * if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4 1495 * else var_B = var_B / 12.92 1496 * 1497 * var_R = 
var_R * 100 1498 * var_G = var_G * 100 1499 * var_B = var_B * 100 1500 * 1501 * // Observer. = 2°, Illuminant = D65 1502 * X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805 1503 * Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722 1504 * Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505 1505 * }</pre> 1506 * 1507 * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue. 1508 * @return An array of doubles where each value is a component of the XYZ color space. 1509 */ convertRgbToXyz(int rgbColor)1510 private static double[] convertRgbToXyz(int rgbColor) { 1511 final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)}; 1512 1513 for (int i = 0; i < comp.length; i++) { 1514 comp[i] /= 255.0; 1515 if (comp[i] > 0.04045) { 1516 comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4); 1517 } else { 1518 comp[i] /= 12.92; 1519 } 1520 comp[i] *= 100; 1521 } 1522 final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805); 1523 final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722); 1524 final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505); 1525 return new double[] {x, y, z}; 1526 } 1527 1528 /** 1529 * Conversion from XYZ to CIE-L*a*b* based algorithm as defined by: 1530 * http://www.easyrgb.com/index.php?X=MATH&H=07#text7 1531 * 1532 * <p><pre> 1533 * {@code 1534 * var_X = X / ref_X //ref_X = 95.047 Observer= 2°, Illuminant= D65 1535 * var_Y = Y / ref_Y //ref_Y = 100.000 1536 * var_Z = Z / ref_Z //ref_Z = 108.883 1537 * 1538 * if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 ) 1539 * else var_X = ( 7.787 * var_X ) + ( 16 / 116 ) 1540 * if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 ) 1541 * else var_Y = ( 7.787 * var_Y ) + ( 16 / 116 ) 1542 * if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 ) 1543 * else var_Z = ( 7.787 * var_Z ) + ( 16 / 116 ) 1544 * 1545 * CIE-L* = ( 116 * var_Y ) - 16 1546 * CIE-a* = 500 * ( var_X - var_Y ) 1547 * 
CIE-b* = 200 * ( var_Y - var_Z ) 1548 * } 1549 * </pre> 1550 * 1551 * @param comp An array of doubles where each value is a component of the XYZ color space. 1552 * @return An array of doubles where each value is a component of the CIE-L*a*b* color space. 1553 */ convertXyzToCieLab(double[] comp)1554 private static double[] convertXyzToCieLab(double[] comp) { 1555 comp[X] /= 95.047; 1556 comp[Y] /= 100.0; 1557 comp[Z] /= 108.883; 1558 1559 for (int i = 0; i < comp.length; i++) { 1560 if (comp[i] > 0.008856) { 1561 comp[i] = Math.pow(comp[i], (1.0 / 3.0)); 1562 } else { 1563 comp[i] = (7.787 * comp[i]) + (16.0 / 116.0); 1564 } 1565 } 1566 final double l = (116 * comp[Y]) - 16; 1567 final double a = 500 * (comp[X] - comp[Y]); 1568 final double b = 200 * (comp[Y] - comp[Z]); 1569 return new double[] {l, a, b}; 1570 } 1571 euclideanDistance(double[] p1, double[] p2)1572 private static int euclideanDistance(double[] p1, double[] p2) { 1573 if (p1.length != p2.length) { 1574 return Integer.MAX_VALUE; 1575 } 1576 double result = 0; 1577 for (int i = 0; i < p1.length; i++) { 1578 result += Math.pow(p1[i] - p2[i], 2); 1579 } 1580 return (int) Math.round(Math.sqrt(result)); 1581 } 1582 1583 /** 1584 * Crops the border of the array representing an image by hBorderSize 1585 * pixels on the left and right borders, and by vBorderSize pixels on the 1586 * top and bottom borders (so the width is 2 * hBorderSize smaller and 1587 * the height is 2 * vBorderSize smaller), then scales the image up to 1588 * match the original size using bilinear interpolation. 
     */
    private static Bitmap shrinkAndScaleBilinear(
            Bitmap input, double hBorderSize, double vBorderSize) {

        int width = input.getWidth();
        int height = input.getHeight();

        // Compute the proper step sizes
        double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
        double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);

        // Read the input bitmap into RGB arrays.
        int[] inputPixels = new int[width * height];
        input.getPixels(inputPixels, 0, width, 0, 0, width, height);
        int[][] inputRgb = new int[width * height][3];
        for (int i = 0; i < width * height; ++i) {
            inputRgb[i][0] = Color.red(inputPixels[i]);
            inputRgb[i][1] = Color.green(inputPixels[i]);
            inputRgb[i][2] = Color.blue(inputPixels[i]);
        }
        // Drop the packed-pixel array early so it can be collected.
        inputPixels = null;

        // Prepare the output buffer.
        int[] outputPixels = new int[width * height];

        // Start the iteration. The first y coordinate is vBorderSize.
        double y = vBorderSize;
        for (int yIndex = 0; yIndex < height; ++yIndex) {
            // The first x coordinate is hBorderSize.
            double x = hBorderSize;
            for (int xIndex = 0; xIndex < width; ++xIndex) {
                // Determine the square of interest.
                int left = (int)x;    // This is floor(x).
                int top = (int)y;     // This is floor(y).
                int right = left + 1;
                int bottom = top + 1;

                // (u, v) is the fractional part of (x, y).
                double u = x - (double)left;
                double v = y - (double)top;

                // Precompute necessary products to save time.
                double p00 = (1.0 - u) * (1.0 - v);
                double p01 = (1.0 - u) * v;
                double p10 = u * (1.0 - v);
                double p11 = u * v;

                // Clamp the indices to prevent out-of-bound that may be caused
                // by round-off error.
                if (left >= width) left = width - 1;
                if (top >= height) top = height - 1;
                if (right >= width) right = width - 1;
                if (bottom >= height) bottom = height - 1;

                // Sample RGB values from the four corners.
                int[] rgb00 = inputRgb[top * width + left];
                int[] rgb01 = inputRgb[bottom * width + left];
                int[] rgb10 = inputRgb[top * width + right];
                int[] rgb11 = inputRgb[bottom * width + right];

                // Interpolate each component of RGB separately.
                int[] mixedColor = new int[3];
                for (int k = 0; k < 3; ++k) {
                    mixedColor[k] = (int)Math.round(
                            p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
                            + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
                }
                // Convert RGB to bitmap Color format and store.
                outputPixels[yIndex * width + xIndex] = Color.rgb(
                        mixedColor[0], mixedColor[1], mixedColor[2]);
                x += xInc;
            }
            y += yInc;
        }
        // Assemble the output buffer into a Bitmap object.
        return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
    }

    /**
     * Calls computeDifference on multiple cropped-and-scaled versions of
     * bitmap2.
     *
     * <p>Returns the smallest difference found, annotating it with the border
     * crop that produced the best match.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops) {

        // Compute the difference with the original image (bitmap2) first.
        Difference minDiff = computeDifference(bitmap1, bitmap2);
        // Then go through the list of borderCrops.
        for (Pair<Double, Double> borderCrop : borderCrops) {
            // Compute the difference between bitmap1 and a transformed
            // version of bitmap2.
            Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
            Difference d = computeDifference(bitmap1, bitmap2s);
            // Keep the minimum difference.
            if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
                minDiff = d;
                minDiff.bestMatchBorderCrop = borderCrop;
            }
        }
        return minDiff;
    }

    /**
     * Calls computeMinimumDifference on a default list of borderCrop.
     *
     * <p>The half-texel and full-texel crops are derived from the ratio of the
     * rendered size to the true video size.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight) {

        double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
        double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
        double hBorderH = 0.5 * hBorder; // Half-texel horizontal border
        double vBorderH = 0.5 * vBorder; // Half-texel vertical border
        return computeMinimumDifference(
                bitmap1,
                bitmap2,
                new Pair[] {
                    Pair.create(hBorderH, 0.0),
                    Pair.create(hBorderH, vBorderH),
                    Pair.create(0.0, vBorderH),
                    Pair.create(hBorder, 0.0),
                    Pair.create(hBorder, vBorder),
                    Pair.create(0.0, vBorder)
                });
        // This default list of borderCrop comes from the behavior of
        // GLConsumer.computeTransformMatrix().
    }

    /* Describes the difference between two {@link Bitmap} instances. */
    public static final class Difference {

        // Largest per-pixel CIE L*a*b* distance found between the two bitmaps.
        public final int greatestPixelDifference;
        // (col, row) of the worst pixel, or null when no coordinate applies.
        public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
        // Border crop that produced this (minimum) difference; (0, 0) when uncropped.
        public Pair<Double, Double> bestMatchBorderCrop;

        private Difference(int greatestPixelDifference) {
            this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
            this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
                    Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
                Pair<Double, Double> bestMatchBorderCrop) {
            this.greatestPixelDifference = greatestPixelDifference;
            this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
            this.bestMatchBorderCrop = bestMatchBorderCrop;
        }
    }

}

/* Wrapper for MIME types. */
final class MimeTypes {

    private MimeTypes() {}

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    /* True when the MIME type names any video format. */
    public static boolean isVideo(String mimeType) {
        return mimeType.startsWith("video");
    }

}