1 /*
2  * Copyright (C) 2016 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package android.media.cts;
17 
18 import android.media.cts.R;
19 
20 import static org.junit.Assert.assertNotNull;
21 
22 import com.android.compatibility.common.util.ApiLevelUtil;
23 
24 import android.annotation.TargetApi;
25 import android.annotation.SuppressLint;
26 import android.app.Activity;
27 import android.content.Context;
28 import android.content.Intent;
29 import android.content.pm.ActivityInfo;
30 import android.content.res.AssetFileDescriptor;
31 import android.content.res.Configuration;
32 import android.content.res.Resources;
33 import android.graphics.Bitmap;
34 import android.graphics.Bitmap.Config;
35 import android.graphics.BitmapFactory;
36 import android.graphics.Color;
37 import android.graphics.SurfaceTexture;
38 import android.media.MediaCodec;
39 import android.media.MediaCodec.BufferInfo;
40 import android.media.MediaCodec.CodecException;
41 import android.media.MediaCodecList;
42 import android.media.MediaExtractor;
43 import android.media.MediaFormat;
44 import android.net.Uri;
45 import android.opengl.EGL14;
46 import android.opengl.GLES11Ext;
47 import android.opengl.GLES20;
48 import android.opengl.GLSurfaceView;
49 import android.os.Build;
50 import android.os.Handler;
51 import android.os.HandlerThread;
52 import android.os.Looper;
53 import android.os.SystemClock;
54 import android.support.test.rule.ActivityTestRule;
55 import android.util.Log;
56 import android.util.Pair;
57 import android.util.SparseArray;
58 import android.view.PixelCopy;
59 import android.view.PixelCopy.OnPixelCopyFinishedListener;
60 import android.view.Surface;
61 import android.view.SurfaceHolder;
62 import android.view.SurfaceView;
63 import android.view.TextureView;
64 import android.view.View;
65 import android.view.ViewGroup;
66 import android.widget.RelativeLayout;
67 
68 import java.io.File;
69 import java.io.IOException;
70 import java.nio.ByteBuffer;
71 import java.nio.ByteOrder;
72 import java.nio.FloatBuffer;
73 import java.util.concurrent.TimeUnit;
74 import java.util.HashMap;
75 
76 import javax.microedition.khronos.egl.EGL10;
77 import javax.microedition.khronos.egl.EGLConfig;
78 import javax.microedition.khronos.egl.EGLContext;
79 import javax.microedition.khronos.egl.EGLDisplay;
80 import javax.microedition.khronos.egl.EGLSurface;
81 
82 import org.junit.After;
83 import org.junit.Before;
84 import org.junit.Rule;
85 
86 @TargetApi(16)
87 public class DecodeAccuracyTestBase {
88 
89     protected Context mContext;
90     protected Resources mResources;
91     protected DecodeAccuracyTestActivity mActivity;
92     protected TestHelper testHelper;
93 
94     @Rule
95     public ActivityTestRule<DecodeAccuracyTestActivity> mActivityRule =
96             new ActivityTestRule<>(DecodeAccuracyTestActivity.class);
97 
    /** Caches the activity, its context/resources, and a {@link TestHelper} before each test. */
    @Before
    public void setUp() throws Exception {
        mActivity = mActivityRule.getActivity();
        mContext = mActivity.getApplicationContext();
        mResources = mActivity.getResources();
        testHelper = new TestHelper(mContext, mActivity);
    }
105 
106     @After
tearDown()107     public void tearDown() throws Exception {
108         mActivity = null;
109         mResources = null;
110         mContext = null;
111         mActivityRule = null;
112     }
113 
bringActivityToFront()114     protected void bringActivityToFront() {
115         Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
116         intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
117         mActivity.startActivity(intent);
118     }
119 
    /** Returns the per-test {@link TestHelper} created in {@link #setUp()}. */
    protected TestHelper getHelper() {
        return testHelper;
    }
123 
    /** Asserts that {@code reference} is non-null, then returns it for call chaining. */
    public static <T> T checkNotNull(T reference) {
        assertNotNull(reference);
        return reference;
    }
128 
    /** Asserts that {@code reference} is non-null (failing with {@code msg}), then returns it. */
    public static <T> T checkNotNull(String msg, T reference) {
        assertNotNull(msg, reference);
        return reference;
    }
133 
134     /* Simple Player that decodes a local video file only. */
135     @TargetApi(16)
136     static class SimplePlayer {
137 
138         public static final long MIN_MS_PER_FRAME = TimeUnit.SECONDS.toMillis(1) / 10; // 10 FPS
139         public static final int END_OF_STREAM = -1;
140         public static final int DEQUEUE_SUCCESS = 1;
141         public static final int DEQUEUE_FAIL = 0;
142 
143         private static final String TAG = SimplePlayer.class.getSimpleName();
144         private static final int NO_TRACK_INDEX = -3;
145         private static final long DEQUEUE_TIMEOUT_US = 20;
146 
147         private final Context context;
148         private final MediaExtractor extractor;
149         private final String codecName;
150         private MediaCodec decoder;
151         private byte[] outputBytes;
152         private boolean renderToSurface;
153         private MediaCodecList mediaCodecList;
154         private Surface surface;
155 
        /** Creates a player that selects a decoder automatically for the clip's format. */
        public SimplePlayer(Context context) {
            this(context, null);
        }
159 
        /**
         * Creates a player.
         *
         * @param context non-null context used to open the video asset.
         * @param codecName explicit decoder to use, or null to pick one by media format.
         */
        public SimplePlayer(Context context, String codecName) {
            this.context = checkNotNull(context);
            this.codecName = codecName;
            this.extractor = new MediaExtractor();
            this.renderToSurface = false;
            this.surface = null;
        }
167 
168         /**
169          * The function play the corresponding file for certain number of frames.
170          *
171          * @param surface is the surface view of decoder output.
172          * @param videoFormat is the format of the video to extract and decode.
173          * @param numOfTotalFrames is the number of Frame wish to play.
174          * @param msPerFrameCap is the maximum msec per frame. No cap is set if value is less than 1.
175          * @return {@link PlayerResult} that consists the result.
176          */
decodeVideoFrames( Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap)177         public PlayerResult decodeVideoFrames(
178                 Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap) {
179             this.surface = surface;
180             PlayerResult playerResult;
181             if (prepareVideoDecode(videoFormat)) {
182                 if (startDecoder()) {
183                     final long timeout =
184                             Math.max(MIN_MS_PER_FRAME, msPerFrameCap) * numOfTotalFrames * 2;
185                     playerResult = decodeFramesAndPlay(numOfTotalFrames, timeout, msPerFrameCap);
186                 } else {
187                     playerResult = PlayerResult.failToStart();
188                 }
189             } else {
190                 playerResult = new PlayerResult();
191             }
192             release();
193             return new PlayerResult(playerResult);
194         }
195 
        /** Same as the four-argument overload, with no per-frame pacing cap. */
        public PlayerResult decodeVideoFrames(
                Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
            return decodeVideoFrames(surface, videoFormat, numOfTotalFrames, 0);
        }
200 
        /** Decodes without rendering (no surface) and with no per-frame pacing cap. */
        public PlayerResult decodeVideoFrames(VideoFormat videoFormat, int numOfTotalFrames) {
            return decodeVideoFrames(null, videoFormat, numOfTotalFrames, 0);
        }
204 
205         /**
206          * The function sets up the extractor and video decoder with proper format.
207          * This must be called before doing starting up the decoder.
208          */
prepareVideoDecode(VideoFormat videoFormat)209         private boolean prepareVideoDecode(VideoFormat videoFormat) {
210             MediaFormat mediaFormat = prepareExtractor(videoFormat);
211             if (mediaFormat == null) {
212                 return false;
213             }
214             configureVideoFormat(mediaFormat, videoFormat);
215             setRenderToSurface(surface != null);
216             return createDecoder(mediaFormat) && configureDecoder(surface, mediaFormat);
217         }
218 
219         /**
220          * Sets up the extractor and gets the {@link MediaFormat} of the track.
221          */
prepareExtractor(VideoFormat videoFormat)222         private MediaFormat prepareExtractor(VideoFormat videoFormat) {
223             if (!setExtractorDataSource(videoFormat)) {
224                 return null;
225             }
226             final int trackNum = getFirstTrackIndexByType(videoFormat.getMediaFormat());
227             if (trackNum == NO_TRACK_INDEX) {
228                 return null;
229             }
230             extractor.selectTrack(trackNum);
231             return extractor.getTrackFormat(trackNum);
232         }
233 
        /**
         * Decodes up to {@code numOfTotalFrames} frames, rendering to the surface when one
         * was supplied, and optionally pacing playback to one frame per {@code msPerFrameCap}.
         *
         * @param numOfTotalFrames is the number of frames to be decoded.
         * @param timeOutMs is the time limit for decoding the frames.
         * @param msPerFrameCap is the maximum msec per frame. No cap is set if value is less than 1.
         * @return {@link PlayerResult} that consists the result.
         */
        private PlayerResult decodeFramesAndPlay(
                int numOfTotalFrames, long timeOutMs, long msPerFrameCap) {
            int numOfDecodedFrames = 0;
            long firstOutputTimeMs = 0;
            long lastFrameAt = 0;
            final long loopStart = SystemClock.elapsedRealtime();

            // Keep feeding and draining the codec until the frame quota or deadline is hit.
            while (numOfDecodedFrames < numOfTotalFrames
                    && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
                try {
                    queueDecoderInputBuffer();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in queueDecoderInputBuffer", exception);
                    break;
                }
                try {
                    final int outputResult = dequeueDecoderOutputBuffer();
                    if (outputResult == SimplePlayer.END_OF_STREAM) {
                        break;
                    }
                    if (outputResult == SimplePlayer.DEQUEUE_SUCCESS) {
                        // Timestamp of the first decoded frame anchors the total-time measurement.
                        if (firstOutputTimeMs == 0) {
                            firstOutputTimeMs = SystemClock.elapsedRealtime();
                        }
                        if (msPerFrameCap > 0) {
                            // Slow down if cap is set and not reached.
                            final long delayMs =
                                    msPerFrameCap - (SystemClock.elapsedRealtime() - lastFrameAt);
                            if (lastFrameAt != 0 && delayMs > 0) {
                                final long threadDelayMs = 3; // In case of delay in thread.
                                if (delayMs > threadDelayMs) {
                                    try {
                                        Thread.sleep(delayMs - threadDelayMs);
                                    } catch (InterruptedException ex) { /* */}
                                }
                                // Busy-wait out the last few milliseconds for precise pacing.
                                while (SystemClock.elapsedRealtime() - lastFrameAt
                                        < msPerFrameCap) { /* */ }
                            }
                            lastFrameAt = SystemClock.elapsedRealtime();
                        }
                        numOfDecodedFrames++;
                    }
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in dequeueDecoderOutputBuffer", exception);
                }
            }
            // NOTE(review): if no frame was ever output, firstOutputTimeMs stays 0 and
            // totalTime equals the elapsedRealtime clock itself, not a duration — confirm
            // callers only read totalTime when decodeSuccess is true.
            final long totalTime = SystemClock.elapsedRealtime() - firstOutputTimeMs;
            return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
        }
291 
        /**
         * Queues the input buffer with the media file one buffer at a time.
         *
         * @return true if an input buffer was dequeued (whether or not a sample was
         *         queued), false if none was available within the timeout.
         */
        private boolean queueDecoderInputBuffer() {
            ByteBuffer inputBuffer;
            final ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
            final int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
            if (inputBufferIndex >= 0) {
                // Pre-LOLLIPOP has no getInputBuffer(int); use the legacy buffer array.
                if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                    inputBuffer = inputBufferArray[inputBufferIndex];
                } else {
                    inputBuffer = decoder.getInputBuffer(inputBufferIndex);
                }
                final int sampleSize = extractor.readSampleData(inputBuffer, 0);
                // NOTE(review): at end of stream (sampleSize <= 0) the buffer is never
                // re-queued and BUFFER_FLAG_END_OF_STREAM is never signaled to the codec;
                // the caller stops via its own frame count/timeout — confirm intended.
                if (sampleSize > 0) {
                    decoder.queueInputBuffer(
                            inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
                return true;
            }
            return false;
        }
317 
        /**
         * Dequeues the output buffer.
         * For video decoder, renders to surface if provided.
         * For audio decoder, gets the bytes from the output buffer.
         *
         * @return an integer indicating its status (fail, success, or end of stream).
         */
        private int dequeueDecoderOutputBuffer() {
            final BufferInfo info = new BufferInfo();
            final int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                return END_OF_STREAM;
            }
            if (decoderStatus >= 0) {
                // For JELLY_BEAN_MR2- devices, when rendering to a surface,
                // info.size seems to always return 0 even if
                // the decoder successfully decoded the frame.
                if (info.size <= 0 && ApiLevelUtil.isAtLeast(Build.VERSION_CODES.JELLY_BEAN_MR2)) {
                    return DEQUEUE_FAIL;
                }
                // Without a surface, copy the decoded bytes out for later inspection.
                if (!renderToSurface) {
                    ByteBuffer outputBuffer;
                    if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                        outputBuffer = decoder.getOutputBuffers()[decoderStatus];
                    } else {
                        outputBuffer = decoder.getOutputBuffer(decoderStatus);
                    }
                    outputBytes = new byte[info.size];
                    outputBuffer.get(outputBytes);
                    outputBuffer.clear();
                }
                // Releasing with render == true pushes the frame to the surface.
                decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
                return DEQUEUE_SUCCESS;
            }
            return DEQUEUE_FAIL;
        }
354 
        /** Releases the decoder first, then the extractor. */
        private void release() {
            decoderRelease();
            extractorRelease();
        }
359 
setExtractorDataSource(VideoFormat videoFormat)360         private boolean setExtractorDataSource(VideoFormat videoFormat) {
361             checkNotNull(videoFormat);
362             try {
363                 final AssetFileDescriptor afd = videoFormat.getAssetFileDescriptor(context);
364                 extractor.setDataSource(
365                         afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
366                 afd.close();
367             } catch (IOException exception) {
368                 Log.e(TAG, "IOException in setDataSource", exception);
369                 return false;
370             }
371             return true;
372         }
373 
        /**
         * Creates a decoder based on conditions.
         *
         * <p>If codec name is provided, {@link MediaCodec#createByCodecName(String)} is used.
         * If codec name is not provided, {@link MediaCodecList#findDecoderForFormat(MediaFormat)}
         * is preferred on LOLLIPOP and up for finding out the codec name that
         * supports the media format.
         * For OS older than LOLLIPOP, {@link MediaCodec#createDecoderByType(String)} is used.
         *
         * @return true when a decoder instance was created, false otherwise.
         */
        private boolean createDecoder(MediaFormat mediaFormat) {
            try {
                if (codecName != null) {
                    decoder = MediaCodec.createByCodecName(codecName);
                } else if (ApiLevelUtil.isAtLeast(Build.VERSION_CODES.LOLLIPOP)) {
                    if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
                        // On LOLLIPOP, format must not contain a frame rate.
                        mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
                    }
                    // Build the codec list once and reuse it across decodes.
                    if (mediaCodecList == null) {
                        mediaCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
                    }
                    decoder = MediaCodec.createByCodecName(
                            mediaCodecList.findDecoderForFormat(mediaFormat));
                } else {
                    decoder = MediaCodec.createDecoderByType(
                            mediaFormat.getString(MediaFormat.KEY_MIME));
                }
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder creation", exception);
                decoderRelease();
                return false;
            }
            return true;
        }
408 
configureDecoder(Surface surface, MediaFormat mediaFormat)409         private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
410             try {
411                 decoder.configure(mediaFormat, surface, null, 0);
412             } catch (Exception exception) {
413                 Log.e(TAG, "Exception during decoder configuration", exception);
414                 try {
415                     decoder.reset();
416                 } catch (Exception resetException) {
417                     Log.e(TAG, "Exception during decoder reset", resetException);
418                 }
419                 decoderRelease();
420                 return false;
421             }
422             return true;
423         }
424 
        /** Records whether decoded frames should be rendered to the surface on release. */
        private void setRenderToSurface(boolean render) {
            this.renderToSurface = render;
        }
428 
startDecoder()429         private boolean startDecoder() {
430             try {
431                 decoder.start();
432             } catch (Exception exception) {
433                 Log.e(TAG, "Exception during decoder start", exception);
434                 decoder.reset();
435                 decoderRelease();
436                 return false;
437             }
438             return true;
439         }
440 
        /**
         * Stops and releases the codec, tolerating a decoder that never started.
         * No-ops once {@code decoder} is null, so it is safe to call repeatedly.
         */
        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                decoder.reset();
                // IllegalStateException happens when decoder fail to start.
                Log.e(TAG, "IllegalStateException during decoder stop", exception);
            } finally {
                // Release in finally so the codec is freed even when stop() threw.
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException during decoder release", exception);
                }
                decoder = null;
            }
        }
460 
        /** Releases the extractor; logs instead of propagating IllegalStateException. */
        private void extractorRelease() {
            // extractor is final and set in the constructor; null guard kept for safety.
            if (extractor == null) {
                return;
            }
            try {
                extractor.release();
            } catch (IllegalStateException exception) {
                Log.e(TAG, "IllegalStateException during extractor release", exception);
            }
        }
471 
        /**
         * Syncs MIME type and dimensions between the extractor's {@link MediaFormat} and the
         * test's {@link VideoFormat}, and applies max-size hints on KITKAT and above.
         */
        private static void configureVideoFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            // Write the dimensions back; presumably VideoFormat may normalize them —
            // TODO(review): confirm getWidth/getHeight can differ from what was just set.
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());
            // KEY_MAX_WIDTH/KEY_MAX_HEIGHT are only honored on KITKAT and newer.
            if (ApiLevelUtil.isBefore(Build.VERSION_CODES.KITKAT)) {
                return;
            }
            if (videoFormat.getMaxWidth() != VideoFormat.INT_UNSET
                && videoFormat.getMaxHeight() != VideoFormat.INT_UNSET) {
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getMaxWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getMaxHeight());
            }
        }
489 
490         /**
491          * The function returns the first track found based on the media type.
492          */
getFirstTrackIndexByType(String format)493         private int getFirstTrackIndexByType(String format) {
494             for (int i = 0; i < extractor.getTrackCount(); i++) {
495                 MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
496                 if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith(format + "/")) {
497                     return i;
498                 }
499             }
500             Log.e(TAG, "couldn't get a " + format + " track");
501             return NO_TRACK_INDEX;
502         }
503 
504         /**
505          * Stores the result from SimplePlayer.
506          */
507         public static final class PlayerResult {
508 
509             public static final int UNSET = -1;
510             private final boolean configureSuccess;
511             private final boolean startSuccess;
512             private final boolean decodeSuccess;
513             private final long totalTime;
514 
PlayerResult( boolean configureSuccess, boolean startSuccess, boolean decodeSuccess, long totalTime)515             public PlayerResult(
516                     boolean configureSuccess, boolean startSuccess,
517                     boolean decodeSuccess, long totalTime) {
518                 this.configureSuccess = configureSuccess;
519                 this.startSuccess = startSuccess;
520                 this.decodeSuccess = decodeSuccess;
521                 this.totalTime = totalTime;
522             }
523 
PlayerResult(PlayerResult playerResult)524             public PlayerResult(PlayerResult playerResult) {
525                 this(playerResult.configureSuccess, playerResult.startSuccess,
526                         playerResult.decodeSuccess, playerResult.totalTime);
527             }
528 
PlayerResult()529             public PlayerResult() {
530                 // Dummy PlayerResult.
531                 this(false, false, false, UNSET);
532             }
533 
failToStart()534             public static PlayerResult failToStart() {
535                 return new PlayerResult(true, false, false, UNSET);
536             }
537 
getFailureMessage()538             public String getFailureMessage() {
539                 if (!configureSuccess) {
540                     return "Failed to configure decoder.";
541                 } else if (!startSuccess) {
542                     return "Failed to start decoder.";
543                 } else if (!decodeSuccess) {
544                     return "Failed to decode the expected number of frames.";
545                 } else {
546                     return "Failed to finish decoding.";
547                 }
548             }
549 
isConfigureSuccess()550             public boolean isConfigureSuccess() {
551                 return configureSuccess;
552             }
553 
isSuccess()554             public boolean isSuccess() {
555                 return configureSuccess && startSuccess && decodeSuccess && getTotalTime() != UNSET;
556             }
557 
getTotalTime()558             public long getTotalTime() {
559                 return totalTime;
560             }
561 
562         }
563 
564     }
565 
    /* Utility class for collecting common test case functionality. */
    class TestHelper {

        private final String TAG =  TestHelper.class.getSimpleName();

        // Application context used for resource access.
        private final Context context;
        // Posts UI work onto the main (UI) thread.
        private final Handler handler;
        // Activity whose view hierarchy and orientation the helpers manipulate.
        private final Activity activity;

        public TestHelper(Context context, Activity activity) {
            this.context = checkNotNull(context);
            this.handler = new Handler(Looper.getMainLooper());
            this.activity = activity;
        }

        /** Decodes the given raw image resource into a {@link Bitmap}. */
        public Bitmap generateBitmapFromImageResourceId(int resourceId) {
            return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId));
        }

        public Context getContext() {
            return context;
        }

        /** Toggles the activity between portrait and landscape on the main thread. */
        public void rotateOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    final int orientation = context.getResources().getConfiguration().orientation;
                    if (orientation == Configuration.ORIENTATION_PORTRAIT) {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                }
            });
        }

        /** Clears any orientation request made by {@link #rotateOrientation()}. */
        public void unsetOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
                }
            });
        }

        /** Attaches {@code view} to the activity's layout on the main thread. */
        public void generateView(View view) {
            RelativeLayout relativeLayout =
                    (RelativeLayout) activity.findViewById(R.id.attach_view);
            ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
            handler.post(viewGenerator);
        }

        /** Detaches {@code view} from its parent on the main thread. */
        public void cleanUpView(View view) {
            ViewCleaner viewCleaner = new ViewCleaner(view);
            handler.post(viewCleaner);
        }

        /**
         * Runs {@code snapshot} on the main thread and polls (100 ms intervals, 30 s
         * timeout) until its bitmap is ready.
         *
         * @return the captured bitmap, or null on interruption or timeout.
         */
        public synchronized Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
            final long timeOutMs = TimeUnit.SECONDS.toMillis(30);
            final long start = SystemClock.elapsedRealtime();
            handler.post(snapshot);
            try {
                while (!snapshot.isBitmapReady()
                        && (SystemClock.elapsedRealtime() - start < timeOutMs)) {
                    Thread.sleep(100);
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
                return null;
            }
            if (!snapshot.isBitmapReady()) {
                Log.e(TAG, "Time out in generateBitmapFromVideoViewSnapshot().");
                return null;
            }
            return snapshot.getBitmap();
        }

        /* Attaches a fixed-size view to the given layout when run on the UI thread. */
        private class ViewGenerator implements Runnable {

            private final View view;
            private final RelativeLayout relativeLayout;

            public ViewGenerator(RelativeLayout relativeLayout, View view) {
                this.view = checkNotNull(view);
                this.relativeLayout = checkNotNull(relativeLayout);
            }

            @Override
            public void run() {
                // Re-parenting requires detaching from any previous parent first.
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                        VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
                view.setLayoutParams(params);
                relativeLayout.addView(view);
            }

        }

        /* Detaches a view from its parent when run on the UI thread. */
        private class ViewCleaner implements Runnable {

            private final View view;

            public ViewCleaner(View view) {
                this.view = checkNotNull(view);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
            }

        }

    }
685 
686 }
687 
688 /* Factory for manipulating a {@link View}. */
689 abstract class VideoViewFactory {
690 
691     public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
692     public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
693     public static final int VIEW_WIDTH = 480;
694     public static final int VIEW_HEIGHT = 360;
695 
VideoViewFactory()696     public VideoViewFactory() {}
697 
release()698     public abstract void release();
699 
getName()700     public abstract String getName();
701 
createView(Context context)702     public abstract View createView(Context context);
703 
waitForViewIsAvailable()704     public void waitForViewIsAvailable() throws Exception {
705         waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
706     };
707 
waitForViewIsAvailable(long timeOutMs)708     public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;
709 
getSurface()710     public abstract Surface getSurface();
711 
getVideoViewSnapshot()712     public abstract VideoViewSnapshot getVideoViewSnapshot();
713 
hasLooper()714     public boolean hasLooper() {
715         return Looper.myLooper() != null;
716     }
717 
718 }
719 
720 /* Factory for building a {@link TextureView}. */
721 @TargetApi(16)
722 class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {
723 
724     private static final String TAG = TextureViewFactory.class.getSimpleName();
725     private static final String NAME = "TextureView";
726 
727     private final Object syncToken = new Object();
728     private TextureView textureView;
729 
TextureViewFactory()730     public TextureViewFactory() {}
731 
732     @Override
createView(Context context)733     public TextureView createView(Context context) {
734         Log.i(TAG, "Creating a " + NAME);
735         textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
736         textureView.setSurfaceTextureListener(this);
737         return textureView;
738     }
739 
740     @Override
release()741     public void release() {
742         textureView = null;
743     }
744 
745     @Override
getName()746     public String getName() {
747         return NAME;
748     }
749 
750     @Override
getSurface()751     public Surface getSurface() {
752         return new Surface(textureView.getSurfaceTexture());
753     }
754 
755     @Override
getVideoViewSnapshot()756     public TextureViewSnapshot getVideoViewSnapshot() {
757         return new TextureViewSnapshot(textureView);
758     }
759 
760     @Override
waitForViewIsAvailable(long timeOutMs)761     public void waitForViewIsAvailable(long timeOutMs) throws Exception {
762         final long start = SystemClock.elapsedRealtime();
763         while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
764             synchronized (syncToken) {
765                 try {
766                     syncToken.wait(VIEW_WAITTIME_MS);
767                 } catch (InterruptedException e) {
768                     Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
769                     throw new InterruptedException(e.getMessage());
770                 }
771             }
772         }
773         if (!textureView.isAvailable()) {
774             throw new InterruptedException("Taking too long to attach a TextureView to a window.");
775         }
776         Log.i(TAG, NAME + " is available.");
777     }
778 
779     @Override
onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height)780     public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
781         synchronized (syncToken) {
782             syncToken.notify();
783         }
784     }
785 
786     @Override
onSurfaceTextureSizeChanged( SurfaceTexture surfaceTexture, int width, int height)787     public void onSurfaceTextureSizeChanged(
788             SurfaceTexture surfaceTexture, int width, int height) {}
789 
790     @Override
onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture)791     public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
792         return false;
793     }
794 
795     @Override
onSurfaceTextureUpdated(SurfaceTexture surfaceTexture)796     public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}
797 
798 }
799 
800 /**
801  * Factory for building a {@link SurfaceView}
802  */
803 @TargetApi(24)
804 class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {
805 
806     private static final String TAG = SurfaceViewFactory.class.getSimpleName();
807     private static final String NAME = "SurfaceView";
808 
809     private final Object syncToken = new Object();
810     private SurfaceViewSnapshot surfaceViewSnapshot;
811     private SurfaceView surfaceView;
812     private SurfaceHolder surfaceHolder;
813 
SurfaceViewFactory()814     public SurfaceViewFactory() {}
815 
816     @Override
release()817     public void release() {
818         if (surfaceViewSnapshot != null) {
819             surfaceViewSnapshot.release();
820         }
821         surfaceView = null;
822         surfaceHolder = null;
823     }
824 
825     @Override
getName()826     public String getName() {
827         return NAME;
828     }
829 
830     @Override
createView(Context context)831     public View createView(Context context) {
832         Log.i(TAG, "Creating a " + NAME);
833         if (!super.hasLooper()) {
834             Looper.prepare();
835         }
836         surfaceView = new SurfaceView(context);
837         surfaceHolder = surfaceView.getHolder();
838         surfaceHolder.addCallback(this);
839         return surfaceView;
840     }
841 
842     @Override
waitForViewIsAvailable(long timeOutMs)843     public void waitForViewIsAvailable(long timeOutMs) throws Exception {
844         final long start = SystemClock.elapsedRealtime();
845         while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
846             synchronized (syncToken) {
847                 try {
848                     syncToken.wait(VIEW_WAITTIME_MS);
849                 } catch (InterruptedException e) {
850                     Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
851                     throw new InterruptedException(e.getMessage());
852                 }
853             }
854         }
855         if (!getSurface().isValid()) {
856             throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
857         }
858         Log.i(TAG, NAME + " is available.");
859     }
860 
861     @Override
getSurface()862     public Surface getSurface() {
863         return surfaceHolder == null ? null : surfaceHolder.getSurface();
864     }
865 
866     @Override
getVideoViewSnapshot()867     public VideoViewSnapshot getVideoViewSnapshot() {
868         surfaceViewSnapshot = new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
869         return surfaceViewSnapshot;
870     }
871 
872     @Override
surfaceChanged(SurfaceHolder holder, int format, int width, int height)873     public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
874 
875     @Override
surfaceCreated(SurfaceHolder holder)876     public void surfaceCreated(SurfaceHolder holder) {
877         synchronized (syncToken) {
878             syncToken.notify();
879         }
880     }
881 
882     @Override
surfaceDestroyed(SurfaceHolder holder)883     public void surfaceDestroyed(SurfaceHolder holder) {}
884 
885 }
886 
887 /**
888  * Factory for building EGL and GLES that could render to GLSurfaceView.
889  * {@link GLSurfaceView} {@link EGL10} {@link GLES20}.
890  */
891 @TargetApi(16)
892 class GLSurfaceViewFactory extends VideoViewFactory {
893 
894     private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
895     private static final String NAME = "GLSurfaceView";
896 
897     private final Object surfaceSyncToken = new Object();
898 
899     private GLSurfaceViewThread glSurfaceViewThread;
900     private boolean byteBufferIsReady = false;
901 
GLSurfaceViewFactory()902     public GLSurfaceViewFactory() {}
903 
904     @Override
release()905     public void release() {
906         glSurfaceViewThread.release();
907         glSurfaceViewThread = null;
908     }
909 
910     @Override
getName()911     public String getName() {
912         return NAME;
913     }
914 
915     @Override
createView(Context context)916     public View createView(Context context) {
917         Log.i(TAG, "Creating a " + NAME);
918         // Do all GL rendering in the GL thread.
919         glSurfaceViewThread = new GLSurfaceViewThread();
920         glSurfaceViewThread.start();
921         // No necessary view to display, return null.
922         return null;
923     }
924 
925     @Override
waitForViewIsAvailable(long timeOutMs)926     public void waitForViewIsAvailable(long timeOutMs) throws Exception {
927         final long start = SystemClock.elapsedRealtime();
928         while (SystemClock.elapsedRealtime() - start < timeOutMs
929                 && glSurfaceViewThread.getSurface() == null) {
930             synchronized (surfaceSyncToken) {
931                 try {
932                     surfaceSyncToken.wait(VIEW_WAITTIME_MS);
933                 } catch (InterruptedException e) {
934                     Log.e(TAG, "Exception occurred when waiting for the surface from"
935                             + " GLSurfaceView to become available.", e);
936                     throw new InterruptedException(e.getMessage());
937                 }
938             }
939         }
940         if (glSurfaceViewThread.getSurface() == null) {
941             throw new InterruptedException("Taking too long for the surface from"
942                     + " GLSurfaceView to become available.");
943         }
944         Log.i(TAG, NAME + " is available.");
945     }
946 
947     @Override
getSurface()948     public Surface getSurface() {
949         return glSurfaceViewThread.getSurface();
950     }
951 
952     @Override
getVideoViewSnapshot()953     public VideoViewSnapshot getVideoViewSnapshot() {
954         return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
955     }
956 
byteBufferIsReady()957     public boolean byteBufferIsReady() {
958         return byteBufferIsReady;
959     }
960 
getByteBuffer()961     public ByteBuffer getByteBuffer() {
962         return glSurfaceViewThread.getByteBuffer();
963     }
964 
965     /* Does all GL operations. */
966     private class GLSurfaceViewThread extends Thread
967             implements SurfaceTexture.OnFrameAvailableListener {
968 
969         private static final int FLOAT_SIZE_BYTES = 4;
970         private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
971         private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
972         private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
973         private FloatBuffer triangleVertices;
974         private float[] textureTransform = new float[16];
975 
976         private float[] triangleVerticesData = {
977             // X, Y, Z, U, V
978             -1f, -1f,  0f,  0f,  1f,
979              1f, -1f,  0f,  1f,  1f,
980             -1f,  1f,  0f,  0f,  0f,
981              1f,  1f,  0f,  1f,  0f,
982         };
983         // Make the top-left corner corresponds to texture coordinate
984         // (0, 0). This complies with the transformation matrix obtained from
985         // SurfaceTexture.getTransformMatrix.
986 
987         private static final String VERTEX_SHADER =
988                 "attribute vec4 aPosition;\n"
989                 + "attribute vec4 aTextureCoord;\n"
990                 + "uniform mat4 uTextureTransform;\n"
991                 + "varying vec2 vTextureCoord;\n"
992                 + "void main() {\n"
993                 + "    gl_Position = aPosition;\n"
994                 + "    vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
995                 + "}\n";
996 
997         private static final String FRAGMENT_SHADER =
998                 "#extension GL_OES_EGL_image_external : require\n"
999                 + "precision mediump float;\n"      // highp here doesn't seem to matter
1000                 + "varying vec2 vTextureCoord;\n"
1001                 + "uniform samplerExternalOES sTexture;\n"
1002                 + "void main() {\n"
1003                 + "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
1004                 + "}\n";
1005 
1006         private int glProgram;
1007         private int textureID = -1;
1008         private int aPositionHandle;
1009         private int aTextureHandle;
1010         private int uTextureTransformHandle;
1011         private EGLDisplay eglDisplay = null;
1012         private EGLContext eglContext = null;
1013         private EGLSurface eglSurface = null;
1014         private EGL10 egl10;
1015         private Surface surface = null;
1016         private SurfaceTexture surfaceTexture;
1017         private ByteBuffer byteBuffer;
1018         private Looper looper;
1019 
GLSurfaceViewThread()1020         public GLSurfaceViewThread() {}
1021 
1022         @Override
run()1023         public void run() {
1024             Looper.prepare();
1025             looper = Looper.myLooper();
1026             triangleVertices = ByteBuffer
1027                     .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
1028                     .order(ByteOrder.nativeOrder()).asFloatBuffer();
1029             triangleVertices.put(triangleVerticesData).position(0);
1030 
1031             eglSetup();
1032             makeCurrent();
1033             eglSurfaceCreated();
1034 
1035             surfaceTexture = new SurfaceTexture(getTextureId());
1036             surfaceTexture.setOnFrameAvailableListener(this);
1037             surface = new Surface(surfaceTexture);
1038             synchronized (surfaceSyncToken) {
1039                 surfaceSyncToken.notify();
1040             }
1041             // Store pixels from surface
1042             byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
1043             byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
1044             Looper.loop();
1045         }
1046 
1047         @Override
onFrameAvailable(SurfaceTexture st)1048         public void onFrameAvailable(SurfaceTexture st) {
1049             checkGlError("before updateTexImage");
1050             surfaceTexture.updateTexImage();
1051             st.getTransformMatrix(textureTransform);
1052             drawFrame();
1053             saveFrame();
1054         }
1055 
1056         /* Prepares EGL to use GLES 2.0 context and a surface that supports pbuffer. */
eglSetup()1057         public void eglSetup() {
1058             egl10 = (EGL10) EGLContext.getEGL();
1059             eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
1060             if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
1061                 throw new RuntimeException("unable to get egl10 display");
1062             }
1063             int[] version = new int[2];
1064             if (!egl10.eglInitialize(eglDisplay, version)) {
1065                 eglDisplay = null;
1066                 throw new RuntimeException("unable to initialize egl10");
1067             }
1068             // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
1069             int[] configAttribs = {
1070                 EGL10.EGL_RED_SIZE, 8,
1071                 EGL10.EGL_GREEN_SIZE, 8,
1072                 EGL10.EGL_BLUE_SIZE, 8,
1073                 EGL10.EGL_ALPHA_SIZE, 8,
1074                 EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
1075                 EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
1076                 EGL10.EGL_NONE
1077             };
1078             EGLConfig[] configs = new EGLConfig[1];
1079             int[] numConfigs = new int[1];
1080             if (!egl10.eglChooseConfig(
1081                     eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
1082                 throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
1083             }
1084             // Configure EGL context for OpenGL ES 2.0.
1085             int[] contextAttribs = {
1086                 EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
1087                 EGL10.EGL_NONE
1088             };
1089             eglContext = egl10.eglCreateContext(
1090                     eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
1091             checkEglError("eglCreateContext");
1092             if (eglContext == null) {
1093                 throw new RuntimeException("null context");
1094             }
1095             // Create a pbuffer surface.
1096             int[] surfaceAttribs = {
1097                 EGL10.EGL_WIDTH, VIEW_WIDTH,
1098                 EGL10.EGL_HEIGHT, VIEW_HEIGHT,
1099                 EGL10.EGL_NONE
1100             };
1101             eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
1102             checkEglError("eglCreatePbufferSurface");
1103             if (eglSurface == null) {
1104                 throw new RuntimeException("surface was null");
1105             }
1106         }
1107 
release()1108         public void release() {
1109             looper.quit();
1110             if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
1111                 egl10.eglMakeCurrent(eglDisplay,
1112                         EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
1113                 egl10.eglDestroySurface(eglDisplay, eglSurface);
1114                 egl10.eglDestroyContext(eglDisplay, eglContext);
1115                 egl10.eglTerminate(eglDisplay);
1116             }
1117             eglDisplay = EGL10.EGL_NO_DISPLAY;
1118             eglContext = EGL10.EGL_NO_CONTEXT;
1119             eglSurface = EGL10.EGL_NO_SURFACE;
1120             surface.release();
1121             surfaceTexture.release();
1122             byteBufferIsReady = false;
1123             byteBuffer =  null;
1124         }
1125 
1126         /* Makes our EGL context and surface current. */
makeCurrent()1127         public void makeCurrent() {
1128             if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
1129                 throw new RuntimeException("eglMakeCurrent failed");
1130             }
1131             checkEglError("eglMakeCurrent");
1132         }
1133 
1134         /* Call this after the EGL Surface is created and made current. */
eglSurfaceCreated()1135         public void eglSurfaceCreated() {
1136             glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
1137             if (glProgram == 0) {
1138                 throw new RuntimeException("failed creating program");
1139             }
1140             aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
1141             checkLocation(aPositionHandle, "aPosition");
1142             aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
1143             checkLocation(aTextureHandle, "aTextureCoord");
1144             uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
1145             checkLocation(uTextureTransformHandle, "uTextureTransform");
1146 
1147             int[] textures = new int[1];
1148             GLES20.glGenTextures(1, textures, 0);
1149             checkGlError("glGenTextures");
1150             textureID = textures[0];
1151             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
1152             checkGlError("glBindTexture");
1153 
1154             GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
1155                     GLES20.GL_LINEAR);
1156             GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
1157                     GLES20.GL_LINEAR);
1158             GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
1159                     GLES20.GL_CLAMP_TO_EDGE);
1160             GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
1161                     GLES20.GL_CLAMP_TO_EDGE);
1162             checkGlError("glTexParameter");
1163         }
1164 
drawFrame()1165         public void drawFrame() {
1166             GLES20.glUseProgram(glProgram);
1167             checkGlError("glUseProgram");
1168             GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
1169             checkGlError("glActiveTexture");
1170             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
1171             checkGlError("glBindTexture");
1172 
1173             triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
1174             GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
1175                     TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
1176             checkGlError("glVertexAttribPointer aPositionHandle");
1177             GLES20.glEnableVertexAttribArray(aPositionHandle);
1178             checkGlError("glEnableVertexAttribArray aPositionHandle");
1179 
1180             triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
1181             GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
1182                     TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
1183             checkGlError("glVertexAttribPointer aTextureHandle");
1184             GLES20.glEnableVertexAttribArray(aTextureHandle);
1185             checkGlError("glEnableVertexAttribArray aTextureHandle");
1186 
1187             GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false, textureTransform, 0);
1188             checkGlError("glUniformMatrix uTextureTransformHandle");
1189 
1190             GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
1191             checkGlError("glDrawArrays");
1192             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
1193         }
1194 
1195         /* Reads the pixels to a ByteBuffer. */
saveFrame()1196         public void saveFrame() {
1197             byteBufferIsReady = false;
1198             byteBuffer.clear();
1199             GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
1200                     GLES20.GL_UNSIGNED_BYTE, byteBuffer);
1201             byteBufferIsReady = true;
1202         }
1203 
getTextureId()1204         public int getTextureId() {
1205             return textureID;
1206         }
1207 
getSurface()1208         public Surface getSurface() {
1209             return surface;
1210         }
1211 
getByteBuffer()1212         public ByteBuffer getByteBuffer() {
1213             return byteBuffer;
1214         }
1215 
loadShader(int shaderType, String source)1216         private int loadShader(int shaderType, String source) {
1217             int shader = GLES20.glCreateShader(shaderType);
1218             checkGlError("glCreateShader type=" + shaderType);
1219             GLES20.glShaderSource(shader, source);
1220             GLES20.glCompileShader(shader);
1221             int[] compiled = new int[1];
1222             GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
1223 
1224             if (compiled[0] == 0) {
1225                 Log.e(TAG, "Could not compile shader " + shaderType + ":");
1226                 Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
1227                 GLES20.glDeleteShader(shader);
1228                 shader = 0;
1229             }
1230             return shader;
1231         }
1232 
createProgram(String vertexSource, String fragmentSource)1233         private int createProgram(String vertexSource, String fragmentSource) {
1234             int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
1235             if (vertexShader == 0) {
1236                 return 0;
1237             }
1238             int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
1239             if (pixelShader == 0) {
1240                 return 0;
1241             }
1242             int program = GLES20.glCreateProgram();
1243             if (program == 0) {
1244                 Log.e(TAG, "Could not create program");
1245             }
1246             GLES20.glAttachShader(program, vertexShader);
1247             checkGlError("glAttachShader");
1248             GLES20.glAttachShader(program, pixelShader);
1249             checkGlError("glAttachShader");
1250             GLES20.glLinkProgram(program);
1251             int[] linkStatus = new int[1];
1252             GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
1253 
1254             if (linkStatus[0] != GLES20.GL_TRUE) {
1255                 Log.e(TAG, "Could not link program: ");
1256                 Log.e(TAG, GLES20.glGetProgramInfoLog(program));
1257                 GLES20.glDeleteProgram(program);
1258                 program = 0;
1259             }
1260             return program;
1261         }
1262 
checkEglError(String msg)1263         private void checkEglError(String msg) {
1264             int error;
1265             if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
1266                 throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
1267             }
1268         }
1269 
checkGlError(String op)1270         public void checkGlError(String op) {
1271             int error;
1272             if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
1273                 Log.e(TAG, op + ": glError " + error);
1274                 throw new RuntimeException(op + ": glError " + error);
1275             }
1276         }
1277 
checkLocation(int location, String label)1278         public void checkLocation(int location, String label) {
1279             if (location < 0) {
1280                 throw new RuntimeException("Unable to locate '" + label + "' in program");
1281             }
1282         }
1283     }
1284 
1285 }
1286 
/* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    /** Returns the captured bitmap, or null if capture has not completed or has failed. */
    public abstract Bitmap getBitmap();

    /** Returns true once the bitmap returned by {@link #getBitmap()} holds a captured frame. */
    public abstract boolean isBitmapReady();

}
1295 
1296 /* Runnable to get a bitmap from a texture view on the UI thread via a handler. */
1297 class TextureViewSnapshot extends VideoViewSnapshot {
1298 
1299     private final TextureView tv;
1300     private Bitmap bitmap = null;
1301 
TextureViewSnapshot(TextureView tv)1302     public TextureViewSnapshot(TextureView tv) {
1303         this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
1304     }
1305 
1306     @Override
run()1307     public synchronized void run() {
1308         bitmap = tv.getBitmap();
1309     }
1310 
1311     @Override
getBitmap()1312     public Bitmap getBitmap() {
1313         return bitmap;
1314     }
1315 
1316     @Override
isBitmapReady()1317     public boolean isBitmapReady() {
1318         return bitmap != null;
1319     }
1320 
1321 }
1322 
1323 /**
1324  * Method to get bitmap of a {@link SurfaceView}.
1325  */
1326 class SurfaceViewSnapshot extends VideoViewSnapshot  {
1327 
1328     private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
1329     private static final int PIXELCOPY_REQUEST_SLEEP_MS = 30;
1330     private static final int PIXELCOPY_TIMEOUT_MS = 1000;
1331 
1332     private Thread copyThread;
1333     private SynchronousPixelCopy copyHelper;
1334     private Bitmap bitmap;
1335     private int copyResult;
1336 
SurfaceViewSnapshot(final SurfaceView surfaceView, final int width, final int height)1337     public SurfaceViewSnapshot(final SurfaceView surfaceView, final int width, final int height) {
1338         this.copyThread = new Thread(new Runnable() {
1339             @Override
1340             public void run() {
1341                 copyHelper = new SynchronousPixelCopy();
1342                 bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
1343                 try {
1344                     // Wait for SurfaceView to be available.
1345                     while ((copyResult = copyHelper.request(surfaceView, bitmap))
1346                             != PixelCopy.SUCCESS) {
1347                         Thread.sleep(PIXELCOPY_REQUEST_SLEEP_MS);
1348                     }
1349                 } catch (InterruptedException e) {
1350                     Log.e(TAG, "Pixel Copy is stopped/interrupted before it finishes.", e);
1351                     bitmap = null;
1352                 }
1353                 copyHelper.release();
1354             }
1355         });
1356         copyThread.start();
1357     }
1358 
1359     @Override
run()1360     public synchronized void run() {}
1361 
1362     @Override
getBitmap()1363     public Bitmap getBitmap() {
1364         return bitmap;
1365     }
1366 
1367     @Override
isBitmapReady()1368     public boolean isBitmapReady() {
1369         return copyResult == PixelCopy.SUCCESS;
1370     }
1371 
release()1372     public void release() {
1373         if (copyThread.isAlive()) {
1374             copyThread.interrupt();
1375         }
1376         copyThread = null;
1377         if (copyHelper != null) {
1378             copyHelper.release();
1379             copyHelper = null;
1380         }
1381         bitmap = null;
1382     }
1383 
1384     private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {
1385 
1386         private final Handler handler;
1387         private final HandlerThread thread;
1388 
1389         private int status = -1;
1390 
SynchronousPixelCopy()1391         public SynchronousPixelCopy() {
1392             this.thread = new HandlerThread("PixelCopyHelper");
1393             thread.start();
1394             this.handler = new Handler(thread.getLooper());
1395         }
1396 
release()1397         public void release() {
1398             if (thread.isAlive()) {
1399                 thread.quit();
1400             }
1401         }
1402 
request(SurfaceView source, Bitmap dest)1403         public int request(SurfaceView source, Bitmap dest) {
1404             synchronized (this) {
1405                 try {
1406                     PixelCopy.request(source, dest, this, handler);
1407                     return getResultLocked();
1408                 } catch (Exception e) {
1409                     Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
1410                     return -1;
1411                 }
1412             }
1413         }
1414 
getResultLocked()1415         private int getResultLocked() {
1416             try {
1417                 this.wait(PIXELCOPY_TIMEOUT_MS);
1418             } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ }
1419             return status;
1420         }
1421 
1422         @Override
onPixelCopyFinished(int copyResult)1423         public void onPixelCopyFinished(int copyResult) {
1424             synchronized (this) {
1425                 status = copyResult;
1426                 this.notify();
1427             }
1428         }
1429 
1430     }
1431 
1432 }
1433 
1434 /**
1435  * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
1436  * Note, because of how the bitmap is captured in GLSurfaceView,
1437  * this method does not have to be a runnable.
1438  */
1439 class GLSurfaceViewSnapshot extends VideoViewSnapshot {
1440 
1441     private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();
1442     private static final int GET_BYTEBUFFER_SLEEP_MS = 30;
1443     private static final int GET_BYTEBUFFER_MAX_ATTEMPTS = 30;
1444 
1445     private final GLSurfaceViewFactory glSurfaceViewFactory;
1446     private final int width;
1447     private final int height;
1448 
1449     private Bitmap bitmap = null;
1450     private boolean bitmapIsReady = false;
1451 
GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height)1452     public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
1453         this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
1454         this.width = width;
1455         this.height = height;
1456     }
1457 
1458     @Override
run()1459     public synchronized void run() {
1460         try {
1461             waitForByteBuffer();
1462         } catch (InterruptedException exception) {
1463             Log.e(TAG, exception.getMessage());
1464             bitmap = null;
1465             return;
1466         }
1467         try {
1468             final ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
1469             bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
1470             byteBuffer.rewind();
1471             bitmap.copyPixelsFromBuffer(byteBuffer);
1472             bitmapIsReady = true;
1473             byteBuffer.clear();
1474         } catch (NullPointerException exception) {
1475             Log.e(TAG, "glSurfaceViewFactory or byteBuffer may have been released", exception);
1476             bitmap = null;
1477         }
1478     }
1479 
1480     @Override
getBitmap()1481     public Bitmap getBitmap() {
1482         return bitmap;
1483     }
1484 
1485     @Override
isBitmapReady()1486     public boolean isBitmapReady() {
1487         return bitmapIsReady;
1488     }
1489 
waitForByteBuffer()1490     public void waitForByteBuffer() throws InterruptedException {
1491         // Wait for byte buffer to be ready.
1492         for (int i = 0; i < GET_BYTEBUFFER_MAX_ATTEMPTS; i++) {
1493             if (glSurfaceViewFactory.byteBufferIsReady()) {
1494                 return;
1495             }
1496             Thread.sleep(GET_BYTEBUFFER_SLEEP_MS);
1497         }
1498         throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
1499     }
1500 
1501 }
1502 
1503 /* Stores information of a video file. */
1504 class VideoFormat {
1505 
1506     public static final String STRING_UNSET = "UNSET";
1507     public static final int INT_UNSET = -1;
1508 
1509     private final String filename;
1510 
1511     private String mimeType = STRING_UNSET;
1512     private int width = INT_UNSET;
1513     private int height = INT_UNSET;
1514     private int maxWidth = INT_UNSET;
1515     private int maxHeight = INT_UNSET;
1516     private FilenameParser filenameParser;
1517 
VideoFormat(String filename)1518     public VideoFormat(String filename) {
1519         this.filename = filename;
1520     }
1521 
VideoFormat(VideoFormat videoFormat)1522     public VideoFormat(VideoFormat videoFormat) {
1523         this(videoFormat.filename);
1524     }
1525 
getParsedName()1526     private FilenameParser getParsedName() {
1527         if (filenameParser == null) {
1528             filenameParser = new FilenameParser(filename);
1529         }
1530         return filenameParser;
1531     }
1532 
getMediaFormat()1533     public String getMediaFormat() {
1534         return "video";
1535     }
1536 
setMimeType(String mimeType)1537     public void setMimeType(String mimeType) {
1538         this.mimeType = mimeType;
1539     }
1540 
getMimeType()1541     public String getMimeType() {
1542         if (mimeType.equals(STRING_UNSET)) {
1543             return getParsedName().getMimeType();
1544         }
1545         return mimeType;
1546     }
1547 
setWidth(int width)1548     public void setWidth(int width) {
1549         this.width = width;
1550     }
1551 
setMaxWidth(int maxWidth)1552     public void setMaxWidth(int maxWidth) {
1553         this.maxWidth = maxWidth;
1554     }
1555 
getWidth()1556     public int getWidth() {
1557         if (width == INT_UNSET) {
1558             return getParsedName().getWidth();
1559         }
1560         return width;
1561     }
1562 
getMaxWidth()1563     public int getMaxWidth() {
1564         return maxWidth;
1565     }
1566 
getOriginalWidth()1567     public int getOriginalWidth() {
1568         return getParsedName().getWidth();
1569     }
1570 
setHeight(int height)1571     public void setHeight(int height) {
1572         this.height = height;
1573     }
1574 
setMaxHeight(int maxHeight)1575     public void setMaxHeight(int maxHeight) {
1576         this.maxHeight = maxHeight;
1577     }
1578 
getHeight()1579     public int getHeight() {
1580         if (height == INT_UNSET) {
1581             return getParsedName().getHeight();
1582         }
1583         return height;
1584     }
1585 
getMaxHeight()1586     public int getMaxHeight() {
1587         return maxHeight;
1588     }
1589 
getOriginalHeight()1590     public int getOriginalHeight() {
1591         return getParsedName().getHeight();
1592     }
1593 
getOriginalSize()1594     public String getOriginalSize() {
1595         if (width == INT_UNSET || height == INT_UNSET) {
1596             return getParsedName().getSize();
1597         }
1598         return width + "x" + height;
1599     }
1600 
getDescription()1601     public String getDescription() {
1602         return getParsedName().getDescription();
1603     }
1604 
toPrettyString()1605     public String toPrettyString() {
1606         return getParsedName().toPrettyString();
1607     }
1608 
getAssetFileDescriptor(Context context)1609     public AssetFileDescriptor getAssetFileDescriptor(Context context) {
1610         try {
1611             return context.getAssets().openFd(filename);
1612         } catch (Exception e) {
1613             e.printStackTrace();
1614             return null;
1615         }
1616     }
1617 
1618 }
1619 
/* File parser for filenames with format of {description}-{codec}_{size}_{framerate}.{format} */
1621 class FilenameParser {
1622 
1623     static final String VP9 = "vp9";
1624     static final String H264 = "h264";
1625 
1626     private final String filename;
1627 
1628     private String codec = VideoFormat.STRING_UNSET;
1629     private String description = VideoFormat.STRING_UNSET;
1630     private int width = VideoFormat.INT_UNSET;
1631     private int height = VideoFormat.INT_UNSET;
1632 
FilenameParser(String filename)1633     FilenameParser(String filename) {
1634         this.filename = filename;
1635         parseFilename(filename);
1636     }
1637 
getCodec()1638     public String getCodec() {
1639         return codec;
1640     }
1641 
getMimeType()1642     public String getMimeType() {
1643         switch (codec) {
1644             case H264:
1645                 return MimeTypes.VIDEO_H264;
1646             case VP9:
1647                 return MimeTypes.VIDEO_VP9;
1648             default:
1649                 return null;
1650         }
1651     }
1652 
getWidth()1653     public int getWidth() {
1654         return width;
1655     }
1656 
getHeight()1657     public int getHeight() {
1658         return height;
1659     }
1660 
getSize()1661     public String getSize() {
1662         return width + "x" + height;
1663     }
1664 
getDescription()1665     public String getDescription() {
1666         return description;
1667     }
1668 
toPrettyString()1669     String toPrettyString() {
1670         if (codec != null) {
1671             return codec.toUpperCase() + " " + getSize();
1672         }
1673         return filename;
1674     }
1675 
parseFilename(String filename)1676     private void parseFilename(String filename) {
1677         final String descriptionDelimiter = "-";
1678         final String infoDelimiter = "_";
1679         final String sizeDelimiter = "x";
1680         try {
1681             this.description = filename.split(descriptionDelimiter)[0];
1682             final String[] fileInfo = filename.split(descriptionDelimiter)[1].split(infoDelimiter);
1683             this.codec = fileInfo[0];
1684             this.width = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[0]);
1685             this.height = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[1]);
1686         } catch (Exception exception) { /* Filename format does not match. */ }
1687     }
1688 
1689 }
1690 
1691 /**
1692  * Compares bitmaps to determine if they are similar.
1693  *
1694  * <p>To determine greatest pixel difference we transform each pixel into the
1695  * CIE L*a*b* color space. The euclidean distance formula is used to determine pixel differences.
1696  */
1697 class BitmapCompare {
1698 
1699     private static final int RED = 0;
1700     private static final int GREEN = 1;
1701     private static final int BLUE = 2;
1702     private static final int X = 0;
1703     private static final int Y = 1;
1704     private static final int Z = 2;
1705 
BitmapCompare()1706     private BitmapCompare() {}
1707 
1708     /**
1709      * Produces greatest pixel between two bitmaps. Used to determine bitmap similarity.
1710      *
1711      * @param bitmap1 A bitmap to compare to bitmap2.
1712      * @param bitmap2 A bitmap to compare to bitmap1.
1713      * @return A {@link Difference} with an integer describing the greatest pixel difference,
1714      *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
1715      *     {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate where it was first found.
1716      */
1717     @TargetApi(12)
computeDifference(Bitmap bitmap1, Bitmap bitmap2)1718     public static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
1719         if (bitmap1 == null || bitmap2 == null) {
1720             return new Difference(Integer.MAX_VALUE);
1721         }
1722         if (bitmap1.equals(bitmap2) || bitmap1.sameAs(bitmap2)) {
1723             return new Difference(0);
1724         }
1725         if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) {
1726             return new Difference(Integer.MAX_VALUE);
1727         }
1728         // Convert all pixels to CIE L*a*b* color space so we can do a direct color comparison using
1729         // euclidean distance formula.
1730         final double[][] pixels1 = convertRgbToCieLab(bitmap1);
1731         final double[][] pixels2 = convertRgbToCieLab(bitmap2);
1732         int greatestDifference = 0;
1733         int greatestDifferenceIndex = -1;
1734         for (int i = 0; i < pixels1.length; i++) {
1735             final int difference = euclideanDistance(pixels1[i], pixels2[i]);
1736             if (difference > greatestDifference) {
1737                 greatestDifference = difference;
1738                 greatestDifferenceIndex = i;
1739             }
1740         }
1741         return new Difference(greatestDifference, Pair.create(
1742             greatestDifferenceIndex % bitmap1.getWidth(),
1743             greatestDifferenceIndex / bitmap1.getHeight()));
1744     }
1745 
1746     @SuppressLint("UseSparseArrays")
convertRgbToCieLab(Bitmap bitmap)1747     private static double[][] convertRgbToCieLab(Bitmap bitmap) {
1748         final HashMap<Integer, double[]> pixelTransformCache = new HashMap<>();
1749         final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
1750         final int[] pixels = new int[bitmap.getHeight() * bitmap.getWidth()];
1751         bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
1752         for (int i = 0; i < pixels.length; i++) {
1753             final double[] transformedColor = pixelTransformCache.get(pixels[i]);
1754             if (transformedColor != null) {
1755                 result[i] = transformedColor;
1756             } else {
1757                 result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
1758                 pixelTransformCache.put(pixels[i], result[i]);
1759             }
1760         }
1761         return result;
1762     }
1763 
1764     /**
1765      * Conversion from RGB to XYZ based algorithm as defined by:
1766      * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
1767      *
1768      * <p><pre>{@code
1769      *   var_R = ( R / 255 )        //R from 0 to 255
1770      *   var_G = ( G / 255 )        //G from 0 to 255
1771      *   var_B = ( B / 255 )        //B from 0 to 255
1772      *
1773      *   if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
1774      *   else                   var_R = var_R / 12.92
1775      *   if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
1776      *   else                   var_G = var_G / 12.92
1777      *   if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
1778      *   else                   var_B = var_B / 12.92
1779      *
1780      *   var_R = var_R * 100
1781      *   var_G = var_G * 100
1782      *   var_B = var_B * 100
1783      *
1784      *   // Observer. = 2°, Illuminant = D65
1785      *   X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
1786      *   Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
1787      *   Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
1788      * }</pre>
1789      *
1790      * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
1791      * @return An array of doubles where each value is a component of the XYZ color space.
1792      */
convertRgbToXyz(int rgbColor)1793     private static double[] convertRgbToXyz(int rgbColor) {
1794         final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
1795         for (int i = 0; i < comp.length; i++) {
1796             comp[i] /= 255.0;
1797             if (comp[i] > 0.04045) {
1798                 comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
1799             } else {
1800                 comp[i] /= 12.92;
1801             }
1802             comp[i] *= 100;
1803         }
1804         final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
1805         final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
1806         final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
1807         return new double[] {x, y, z};
1808     }
1809 
1810     /**
1811      * Conversion from XYZ to CIE-L*a*b* based algorithm as defined by:
1812      * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
1813      *
1814      * <p><pre>
1815      * {@code
1816      *   var_X = X / ref_X          //ref_X =  95.047   Observer= 2°, Illuminant= D65
1817      *   var_Y = Y / ref_Y          //ref_Y = 100.000
1818      *   var_Z = Z / ref_Z          //ref_Z = 108.883
1819      *
1820      *   if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
1821      *   else                    var_X = ( 7.787 * var_X ) + ( 16 / 116 )
1822      *   if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
1823      *   else                    var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
1824      *   if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
1825      *   else                    var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
1826      *
1827      *   CIE-L* = ( 116 * var_Y ) - 16
1828      *   CIE-a* = 500 * ( var_X - var_Y )
1829      *   CIE-b* = 200 * ( var_Y - var_Z )
1830      * }
1831      * </pre>
1832      *
1833      * @param comp An array of doubles where each value is a component of the XYZ color space.
1834      * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
1835      */
convertXyzToCieLab(double[] comp)1836     private static double[] convertXyzToCieLab(double[] comp) {
1837         comp[X] /= 95.047;
1838         comp[Y] /= 100.0;
1839         comp[Z] /= 108.883;
1840         for (int i = 0; i < comp.length; i++) {
1841             if (comp[i] > 0.008856) {
1842                 comp[i] = Math.pow(comp[i], (1.0 / 3.0));
1843             } else {
1844                 comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
1845             }
1846         }
1847         final double l = (116 * comp[Y]) - 16;
1848         final double a = 500 * (comp[X] - comp[Y]);
1849         final double b = 200 * (comp[Y] - comp[Z]);
1850         return new double[] {l, a, b};
1851     }
1852 
euclideanDistance(double[] p1, double[] p2)1853     private static int euclideanDistance(double[] p1, double[] p2) {
1854         if (p1.length != p2.length) {
1855             return Integer.MAX_VALUE;
1856         }
1857         double result = 0;
1858         for (int i = 0; i < p1.length; i++) {
1859             result += Math.pow(p1[i] - p2[i], 2);
1860         }
1861         return (int) Math.round(Math.sqrt(result));
1862     }
1863 
1864     /**
1865      * Crops the border of the array representing an image by hBorderSize
1866      * pixels on the left and right borders, and by vBorderSize pixels on the
1867      * top and bottom borders (so the width is 2 * hBorderSize smaller and
1868      * the height is 2 * vBorderSize smaller), then scales the image up to
1869      * match the original size using bilinear interpolation.
1870      */
shrinkAndScaleBilinear( Bitmap input, double hBorderSize, double vBorderSize)1871     private static Bitmap shrinkAndScaleBilinear(
1872             Bitmap input, double hBorderSize, double vBorderSize) {
1873 
1874         int width = input.getWidth();
1875         int height = input.getHeight();
1876 
1877         // Compute the proper step sizes
1878         double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
1879         double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);
1880 
1881         // Read the input bitmap into RGB arrays.
1882         int[] inputPixels = new int[width * height];
1883         input.getPixels(inputPixels, 0, width, 0, 0, width, height);
1884         int[][] inputRgb = new int[width * height][3];
1885         for (int i = 0; i < width * height; ++i) {
1886             inputRgb[i][0] = Color.red(inputPixels[i]);
1887             inputRgb[i][1] = Color.green(inputPixels[i]);
1888             inputRgb[i][2] = Color.blue(inputPixels[i]);
1889         }
1890         inputPixels = null;
1891 
1892         // Prepare the output buffer.
1893         int[] outputPixels = new int[width * height];
1894 
1895         // Start the iteration. The first y coordinate is vBorderSize.
1896         double y = vBorderSize;
1897         for (int yIndex = 0; yIndex < height; ++yIndex) {
1898             // The first x coordinate is hBorderSize.
1899             double x = hBorderSize;
1900             for (int xIndex = 0; xIndex < width; ++xIndex) {
1901                 // Determine the square of interest.
1902                 int left = (int)x;    // This is floor(x).
1903                 int top = (int)y;     // This is floor(y).
1904                 int right = left + 1;
1905                 int bottom = top + 1;
1906 
1907                 // (u, v) is the fractional part of (x, y).
1908                 double u = x - (double)left;
1909                 double v = y - (double)top;
1910 
1911                 // Precompute necessary products to save time.
1912                 double p00 = (1.0 - u) * (1.0 - v);
1913                 double p01 = (1.0 - u) * v;
1914                 double p10 = u * (1.0 - v);
1915                 double p11 = u * v;
1916 
1917                 // Clamp the indices to prevent out-of-bound that may be caused
1918                 // by round-off error.
1919                 if (left >= width) left = width - 1;
1920                 if (top >= height) top = height - 1;
1921                 if (right >= width) right = width - 1;
1922                 if (bottom >= height) bottom = height - 1;
1923 
1924                 // Sample RGB values from the four corners.
1925                 int[] rgb00 = inputRgb[top * width + left];
1926                 int[] rgb01 = inputRgb[bottom * width + left];
1927                 int[] rgb10 = inputRgb[top * width + right];
1928                 int[] rgb11 = inputRgb[bottom * width + right];
1929 
1930                 // Interpolate each component of RGB separately.
1931                 int[] mixedColor = new int[3];
1932                 for (int k = 0; k < 3; ++k) {
1933                     mixedColor[k] = (int)Math.round(
1934                             p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
1935                             + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
1936                 }
1937                 // Convert RGB to bitmap Color format and store.
1938                 outputPixels[yIndex * width + xIndex] = Color.rgb(
1939                         mixedColor[0], mixedColor[1], mixedColor[2]);
1940                 x += xInc;
1941             }
1942             y += yInc;
1943         }
1944         // Assemble the output buffer into a Bitmap object.
1945         return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
1946     }
1947 
1948     /**
1949      * Calls computeDifference on multiple cropped-and-scaled versions of
1950      * bitmap2.
1951      */
1952     @TargetApi(12)
computeMinimumDifference( Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops)1953     public static Difference computeMinimumDifference(
1954             Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops) {
1955 
1956         // Compute the difference with the original image (bitmap2) first.
1957         Difference minDiff = computeDifference(bitmap1, bitmap2);
1958         // Then go through the list of borderCrops.
1959         for (Pair<Double, Double> borderCrop : borderCrops) {
1960             // Compute the difference between bitmap1 and a transformed
1961             // version of bitmap2.
1962             Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
1963             Difference d = computeDifference(bitmap1, bitmap2s);
1964             // Keep the minimum difference.
1965             if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
1966                 minDiff = d;
1967                 minDiff.bestMatchBorderCrop = borderCrop;
1968             }
1969         }
1970         return minDiff;
1971     }
1972 
1973     /**
1974      * Calls computeMinimumDifference on a default list of borderCrop.
1975      */
1976     @TargetApi(12)
computeMinimumDifference( Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight)1977     public static Difference computeMinimumDifference(
1978             Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight) {
1979 
1980         double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
1981         double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
1982         double hBorderH = 0.5 * hBorder; // Half-texel horizontal border
1983         double vBorderH = 0.5 * vBorder; // Half-texel vertical border
1984         return computeMinimumDifference(
1985                 bitmap1,
1986                 bitmap2,
1987                 new Pair[] {
1988                     Pair.create(hBorderH, 0.0),
1989                     Pair.create(hBorderH, vBorderH),
1990                     Pair.create(0.0, vBorderH),
1991                     Pair.create(hBorder, 0.0),
1992                     Pair.create(hBorder, vBorder),
1993                     Pair.create(0.0, vBorder)
1994                 });
1995         // This default list of borderCrop comes from the behavior of
1996         // GLConsumer.computeTransformMatrix().
1997     }
1998 
1999     /* Describes the difference between two {@link Bitmap} instances. */
2000     public static final class Difference {
2001 
2002         public final int greatestPixelDifference;
2003         public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
2004         public Pair<Double, Double> bestMatchBorderCrop;
2005 
Difference(int greatestPixelDifference)2006         private Difference(int greatestPixelDifference) {
2007             this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
2008         }
2009 
Difference( int greatestPixelDifference, Pair<Integer, Integer> greatestPixelDifferenceCoordinates)2010         private Difference(
2011                 int greatestPixelDifference,
2012                 Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
2013             this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
2014                     Pair.create(0.0, 0.0));
2015         }
2016 
Difference( int greatestPixelDifference, Pair<Integer, Integer> greatestPixelDifferenceCoordinates, Pair<Double, Double> bestMatchBorderCrop)2017         private Difference(
2018                 int greatestPixelDifference,
2019                 Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
2020                 Pair<Double, Double> bestMatchBorderCrop) {
2021             this.greatestPixelDifference = greatestPixelDifference;
2022             this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
2023             this.bestMatchBorderCrop = bestMatchBorderCrop;
2024         }
2025     }
2026 
2027 }
2028 
2029 /* Wrapper for MIME types. */
final class MimeTypes {

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    /** Returns whether the given MIME type denotes a video format. */
    public static boolean isVideo(String mimeType) {
        return mimeType.startsWith("video");
    }

    // Static utility holder; never instantiated.
    private MimeTypes() {}

}
2042