1 /*
2  * Copyright (C) 2016 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package android.media.decoder.cts;
17 
18 import static org.junit.Assert.assertNotNull;
19 
20 import android.annotation.SuppressLint;
21 import android.annotation.TargetApi;
22 import android.app.Activity;
23 import android.content.Context;
24 import android.content.Intent;
25 import android.content.pm.ActivityInfo;
26 import android.content.res.AssetFileDescriptor;
27 import android.content.res.Configuration;
28 import android.content.res.Resources;
29 import android.graphics.Bitmap;
30 import android.graphics.Bitmap.Config;
31 import android.graphics.BitmapFactory;
32 import android.graphics.Color;
33 import android.graphics.SurfaceTexture;
34 import android.media.MediaCodec;
35 import android.media.MediaCodec.BufferInfo;
36 import android.media.MediaCodecInfo.VideoCapabilities;
37 import android.media.MediaCodecList;
38 import android.media.MediaExtractor;
39 import android.media.MediaFormat;
40 import android.opengl.EGL14;
41 import android.opengl.GLES11Ext;
42 import android.opengl.GLES20;
43 import android.opengl.GLSurfaceView;
44 import android.os.Build;
45 import android.os.Handler;
46 import android.os.HandlerThread;
47 import android.os.Looper;
48 import android.os.ParcelFileDescriptor;
49 import android.os.SystemClock;
50 import android.util.Log;
51 import android.util.Pair;
52 import android.view.PixelCopy;
53 import android.view.PixelCopy.OnPixelCopyFinishedListener;
54 import android.view.Surface;
55 import android.view.SurfaceHolder;
56 import android.view.SurfaceView;
57 import android.view.TextureView;
58 import android.view.View;
59 import android.view.ViewGroup;
60 import android.widget.RelativeLayout;
61 
62 import androidx.test.rule.ActivityTestRule;
63 
64 import com.android.compatibility.common.util.ApiLevelUtil;
65 import com.android.compatibility.common.util.MediaUtils;
66 
67 import org.junit.After;
68 import org.junit.Assume;
69 import org.junit.Before;
70 import org.junit.Rule;
71 
72 import java.io.File;
73 import java.io.FileNotFoundException;
74 import java.io.IOException;
75 import java.nio.ByteBuffer;
76 import java.nio.ByteOrder;
77 import java.nio.FloatBuffer;
78 import java.util.HashMap;
79 import java.util.concurrent.CountDownLatch;
80 import java.util.concurrent.TimeUnit;
81 
82 import javax.microedition.khronos.egl.EGL10;
83 import javax.microedition.khronos.egl.EGLConfig;
84 import javax.microedition.khronos.egl.EGLContext;
85 import javax.microedition.khronos.egl.EGLDisplay;
86 import javax.microedition.khronos.egl.EGLSurface;
87 
88 @TargetApi(16)
89 public class DecodeAccuracyTestBase {
90 
91     protected Context mContext;
92     protected Resources mResources;
93     protected DecodeAccuracyTestActivity mActivity;
94     protected TestHelper testHelper;
95 
96     @Rule
97     public ActivityTestRule<DecodeAccuracyTestActivity> mActivityRule =
98             new ActivityTestRule<>(DecodeAccuracyTestActivity.class);
99 
100     @Before
setUp()101     public void setUp() throws Exception {
102         mActivity = mActivityRule.getActivity();
103         mContext = mActivity.getApplicationContext();
104         mResources = mActivity.getResources();
105         testHelper = new TestHelper(mContext, mActivity);
106     }
107 
108     @After
tearDown()109     public void tearDown() throws Exception {
110         mActivity = null;
111         mResources = null;
112         mContext = null;
113         mActivityRule = null;
114     }
115 
bringActivityToFront()116     protected void bringActivityToFront() {
117         Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
118         intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
119         mActivity.startActivity(intent);
120     }
121 
getHelper()122     protected TestHelper getHelper() {
123         return testHelper;
124     }
125 
checkNotNull(T reference)126     public static <T> T checkNotNull(T reference) {
127         assertNotNull(reference);
128         return reference;
129     }
130 
checkNotNull(String msg, T reference)131     public static <T> T checkNotNull(String msg, T reference) {
132         assertNotNull(msg, reference);
133         return reference;
134     }
135 
136     /* Simple Player that decodes a local video file only. */
137     @TargetApi(16)
138     static class SimplePlayer {
139 
140         public static final long MIN_MS_PER_FRAME = TimeUnit.SECONDS.toMillis(1) / 5; // 5 FPS
141         public static final long STARTUP_ALLOW_MS = TimeUnit.SECONDS.toMillis(1) ;
142         public static final int END_OF_STREAM = -1;
143         public static final int DEQUEUE_SUCCESS = 1;
144         public static final int DEQUEUE_FAIL = 0;
145 
146         private static final String TAG = SimplePlayer.class.getSimpleName();
147         private static final int NO_TRACK_INDEX = -3;
148         private static final long DEQUEUE_TIMEOUT_US = 20;
149 
150         private final Context context;
151         private final MediaExtractor extractor;
152         private final String codecName;
153         private MediaCodec decoder;
154         private byte[] outputBytes;
155         private boolean renderToSurface;
156         private MediaCodecList mediaCodecList;
157         private Surface surface;
158 
SimplePlayer(Context context)159         public SimplePlayer(Context context) {
160             this(context, null);
161         }
162 
SimplePlayer(Context context, String codecName)163         public SimplePlayer(Context context, String codecName) {
164             this.context = checkNotNull(context);
165             this.codecName = codecName;
166             this.extractor = new MediaExtractor();
167             this.renderToSurface = false;
168             this.surface = null;
169         }
170 
171         /**
172          * The function play the corresponding file for certain number of frames.
173          *
174          * @param surface is the surface view of decoder output.
175          * @param videoFormat is the format of the video to extract and decode.
176          * @param numOfTotalFrames is the number of Frame wish to play.
177          * @param msPerFrameCap is the maximum msec per frame. No cap is set if value is less than 1.
178          * @return {@link PlayerResult} that consists the result.
179          */
decodeVideoFrames( Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap, boolean releasePlayer)180         public PlayerResult decodeVideoFrames(
181                 Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap,
182                 boolean releasePlayer) {
183             this.surface = surface;
184             PlayerResult playerResult;
185             if (prepareVideoDecode(videoFormat)) {
186                 if (startDecoder()) {
187                     final long timeout =
188                             Math.max(MIN_MS_PER_FRAME, msPerFrameCap) * numOfTotalFrames + STARTUP_ALLOW_MS;
189                     playerResult = decodeFramesAndPlay(numOfTotalFrames, timeout, msPerFrameCap);
190                 } else {
191                     playerResult = PlayerResult.failToStart();
192                 }
193             } else {
194                 playerResult = new PlayerResult();
195             }
196             if (releasePlayer) {
197                 release();
198             }
199             return new PlayerResult(playerResult);
200         }
201 
decodeVideoFrames( Surface surface, VideoFormat videoFormat, int numOfTotalFrames)202         public PlayerResult decodeVideoFrames(
203                 Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
204             return decodeVideoFrames(surface, videoFormat, numOfTotalFrames, 0, false);
205         }
206 
207         /**
208          * The function sets up the extractor and video decoder with proper format.
209          * This must be called before doing starting up the decoder.
210          */
prepareVideoDecode(VideoFormat videoFormat)211         private boolean prepareVideoDecode(VideoFormat videoFormat) {
212             MediaFormat mediaFormat = prepareExtractor(videoFormat);
213             if (mediaFormat == null) {
214                 return false;
215             }
216             configureVideoFormat(mediaFormat, videoFormat);
217             Assume.assumeTrue("Decoder " + codecName + " doesn't support format " + mediaFormat,
218                     MediaUtils.supports(codecName, mediaFormat));
219             setRenderToSurface(surface != null);
220             return createDecoder(mediaFormat) && configureDecoder(surface, mediaFormat);
221         }
222 
223         /**
224          * Sets up the extractor and gets the {@link MediaFormat} of the track.
225          */
prepareExtractor(VideoFormat videoFormat)226         private MediaFormat prepareExtractor(VideoFormat videoFormat) {
227             if (!setExtractorDataSource(videoFormat)) {
228                 return null;
229             }
230             final int trackNum = getFirstTrackIndexByType(videoFormat.getMediaFormat());
231             if (trackNum == NO_TRACK_INDEX) {
232                 return null;
233             }
234             extractor.selectTrack(trackNum);
235             return extractor.getTrackFormat(trackNum);
236         }
237 
238         /**
239          * The function decode video frames and display in a surface.
240          *
241          * @param numOfTotalFrames is the number of frames to be decoded.
242          * @param timeOutMs is the time limit for decoding the frames.
243          * @param msPerFrameCap is the maximum msec per frame. No cap is set if value is less than 1.
244          * @return {@link PlayerResult} that consists the result.
245          */
decodeFramesAndPlay( int numOfTotalFrames, long timeOutMs, long msPerFrameCap)246         private PlayerResult decodeFramesAndPlay(
247                 int numOfTotalFrames, long timeOutMs, long msPerFrameCap) {
248             int numOfDecodedFrames = 0;
249             long firstOutputTimeMs = 0;
250             long lastFrameAt = 0;
251             final long loopStart = SystemClock.elapsedRealtime();
252 
253             while (numOfDecodedFrames < numOfTotalFrames
254                     && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
255                 try {
256                     queueDecoderInputBuffer();
257                 } catch (IllegalStateException exception) {
258                     Log.e(TAG, "IllegalStateException in queueDecoderInputBuffer", exception);
259                     break;
260                 }
261                 try {
262                     final int outputResult = dequeueDecoderOutputBuffer();
263                     if (outputResult == SimplePlayer.END_OF_STREAM) {
264                         break;
265                     }
266                     if (outputResult == SimplePlayer.DEQUEUE_SUCCESS) {
267                         if (firstOutputTimeMs == 0) {
268                             firstOutputTimeMs = SystemClock.elapsedRealtime();
269                         }
270                         if (msPerFrameCap > 0) {
271                             // Slow down if cap is set and not reached.
272                             final long delayMs =
273                                     msPerFrameCap - (SystemClock.elapsedRealtime() - lastFrameAt);
274                             if (lastFrameAt != 0 && delayMs > 0) {
275                                 final long threadDelayMs = 3; // In case of delay in thread.
276                                 if (delayMs > threadDelayMs) {
277                                     try {
278                                         Thread.sleep(delayMs - threadDelayMs);
279                                     } catch (InterruptedException ex) { /* */}
280                                 }
281                                 while (SystemClock.elapsedRealtime() - lastFrameAt
282                                         < msPerFrameCap) { /* */ }
283                             }
284                             lastFrameAt = SystemClock.elapsedRealtime();
285                         }
286                         numOfDecodedFrames++;
287                     }
288                 } catch (IllegalStateException exception) {
289                     Log.e(TAG, "IllegalStateException in dequeueDecoderOutputBuffer", exception);
290                 }
291             }
292             // NB: totalTime measures from "first output" instead of
293             // "first INPUT", so does not include first frame latency
294             // and therefore does not tell us if the timeout expired
295             final long totalTime = SystemClock.elapsedRealtime() - firstOutputTimeMs;
296             return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
297         }
298 
299         /**
300          * Queues the input buffer with the media file one buffer at a time.
301          *
302          * @return true if success, fail otherwise.
303          */
queueDecoderInputBuffer()304         private boolean queueDecoderInputBuffer() {
305             ByteBuffer inputBuffer;
306             final ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
307             final int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
308             if (inputBufferIndex >= 0) {
309                 if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
310                     inputBuffer = inputBufferArray[inputBufferIndex];
311                 } else {
312                     inputBuffer = decoder.getInputBuffer(inputBufferIndex);
313                 }
314                 final int sampleSize = extractor.readSampleData(inputBuffer, 0);
315                 if (sampleSize > 0) {
316                     decoder.queueInputBuffer(
317                             inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
318                     extractor.advance();
319                 }
320                 return true;
321             }
322             return false;
323         }
324 
325         /**
326          * Dequeues the output buffer.
327          * For video decoder, renders to surface if provided.
328          * For audio decoder, gets the bytes from the output buffer.
329          *
330          * @return an integer indicating its status (fail, success, or end of stream).
331          */
dequeueDecoderOutputBuffer()332         private int dequeueDecoderOutputBuffer() {
333             final BufferInfo info = new BufferInfo();
334             final int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
335             if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
336                 return END_OF_STREAM;
337             }
338             if (decoderStatus >= 0) {
339                 // For JELLY_BEAN_MR2- devices, when rendering to a surface,
340                 // info.size seems to always return 0 even if
341                 // the decoder successfully decoded the frame.
342                 if (info.size <= 0 && ApiLevelUtil.isAtLeast(Build.VERSION_CODES.JELLY_BEAN_MR2)) {
343                     return DEQUEUE_FAIL;
344                 }
345                 if (!renderToSurface) {
346                     ByteBuffer outputBuffer;
347                     if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
348                         outputBuffer = decoder.getOutputBuffers()[decoderStatus];
349                     } else {
350                         outputBuffer = decoder.getOutputBuffer(decoderStatus);
351                     }
352                     outputBytes = new byte[info.size];
353                     outputBuffer.get(outputBytes);
354                     outputBuffer.clear();
355                 }
356                 decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
357                 return DEQUEUE_SUCCESS;
358             }
359             return DEQUEUE_FAIL;
360         }
361 
release()362         public void release() {
363             decoderRelease();
364             extractorRelease();
365         }
366 
setExtractorDataSource(VideoFormat videoFormat)367         private boolean setExtractorDataSource(VideoFormat videoFormat) {
368             checkNotNull(videoFormat);
369             try {
370                 final AssetFileDescriptor afd = videoFormat.getAssetFileDescriptor();
371                 extractor.setDataSource(
372                         afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
373                 afd.close();
374             } catch (IOException exception) {
375                 Log.e(TAG, "IOException in setDataSource", exception);
376                 return false;
377             }
378             return true;
379         }
380 
381         /**
382          * Creates a decoder based on conditions.
383          *
384          * <p>If codec name is provided, {@link MediaCodec#createByCodecName(String)} is used.
385          * If codec name is not provided, {@link MediaCodecList#findDecoderForFormat(MediaFormat)}
386          * is preferred on LOLLIPOP and up for finding out the codec name that
387          * supports the media format.
388          * For OS older than LOLLIPOP, {@link MediaCodec#createDecoderByType(String)} is used.
389          */
createDecoder(MediaFormat mediaFormat)390         private boolean createDecoder(MediaFormat mediaFormat) {
391             try {
392                 if (codecName != null) {
393                     decoder = MediaCodec.createByCodecName(codecName);
394                 } else if (ApiLevelUtil.isAtLeast(Build.VERSION_CODES.LOLLIPOP)) {
395                     if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
396                         // On LOLLIPOP, format must not contain a frame rate.
397                         mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
398                     }
399                     if (mediaCodecList == null) {
400                         mediaCodecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
401                     }
402                     decoder = MediaCodec.createByCodecName(
403                             mediaCodecList.findDecoderForFormat(mediaFormat));
404                 } else {
405                     decoder = MediaCodec.createDecoderByType(
406                             mediaFormat.getString(MediaFormat.KEY_MIME));
407                 }
408             } catch (Exception exception) {
409                 Log.e(TAG, "Exception during decoder creation", exception);
410                 decoderRelease();
411                 return false;
412             }
413             return true;
414         }
415 
configureDecoder(Surface surface, MediaFormat mediaFormat)416         private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
417             try {
418                 decoder.configure(mediaFormat, surface, null, 0);
419             } catch (Exception exception) {
420                 Log.e(TAG, "Exception during decoder configuration", exception);
421                 try {
422                     decoder.reset();
423                 } catch (Exception resetException) {
424                     Log.e(TAG, "Exception during decoder reset", resetException);
425                 }
426                 decoderRelease();
427                 return false;
428             }
429             return true;
430         }
431 
setRenderToSurface(boolean render)432         private void setRenderToSurface(boolean render) {
433             this.renderToSurface = render;
434         }
435 
startDecoder()436         private boolean startDecoder() {
437             try {
438                 decoder.start();
439             } catch (Exception exception) {
440                 Log.e(TAG, "Exception during decoder start", exception);
441                 decoder.reset();
442                 decoderRelease();
443                 return false;
444             }
445             return true;
446         }
447 
        /**
         * Stops and releases the decoder, tolerating codecs left in an illegal state.
         * Always nulls out {@link #decoder} afterwards, so a repeated call is a no-op.
         */
        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                // Recover the codec to a known state before releasing it.
                decoder.reset();
                // IllegalStateException happens when decoder fail to start.
                Log.e(TAG, "IllegalStateException during decoder stop", exception);
            } finally {
                // Release unconditionally; even release() can throw on a broken codec.
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException during decoder release", exception);
                }
                decoder = null;
            }
        }
467 
extractorRelease()468         private void extractorRelease() {
469             if (extractor == null) {
470                 return;
471             }
472             try {
473                 extractor.release();
474             } catch (IllegalStateException exception) {
475                 Log.e(TAG, "IllegalStateException during extractor release", exception);
476             }
477         }
478 
        /**
         * Synchronizes track metadata between the container's {@link MediaFormat} and the
         * test's {@link VideoFormat}, then (KITKAT+) configures adaptive-playback
         * max-resolution keys when ABR is enabled.
         */
        private static void configureVideoFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            // Mirror the extracted mime/size into the VideoFormat, then write the
            // (possibly overridden) size back into the MediaFormat.
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());
            // KEY_MAX_WIDTH / KEY_MAX_HEIGHT are unavailable before KITKAT, hence the gate.
            if (ApiLevelUtil.isBefore(Build.VERSION_CODES.KITKAT)) {
                return;
            }
            // Set KEY_MAX_WIDTH and KEY_MAX_HEIGHT when isAbrEnabled() is set.
            if (videoFormat.isAbrEnabled()) {
                try {
                    // Check for max resolution supported by the codec.
                    final MediaCodec decoder = MediaUtils.getDecoder(mediaFormat);
                    final VideoCapabilities videoCapabilities = MediaUtils.getVideoCapabilities(
                            decoder.getName(), videoFormat.getMimeType());
                    decoder.release();
                    final int maxWidth = videoCapabilities.getSupportedWidths().getUpper();
                    final int maxHeight =
                            videoCapabilities.getSupportedHeightsFor(maxWidth).getUpper();
                    if (maxWidth >= videoFormat.getWidth() && maxHeight >= videoFormat.getHeight()) {
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, maxWidth);
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, maxHeight);
                        return;
                    }
                } catch (NullPointerException exception) { /* best effort: fall through below */ }
                // Set max width/height to current size if can't get codec's max supported
                // width/height or max is not greater than the current size.
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getHeight());
            }
        }
513 
514         /**
515          * The function returns the first track found based on the media type.
516          */
getFirstTrackIndexByType(String format)517         private int getFirstTrackIndexByType(String format) {
518             for (int i = 0; i < extractor.getTrackCount(); i++) {
519                 MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
520                 if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith(format + "/")) {
521                     return i;
522                 }
523             }
524             Log.e(TAG, "couldn't get a " + format + " track");
525             return NO_TRACK_INDEX;
526         }
527 
528         /**
529          * Stores the result from SimplePlayer.
530          */
531         public static final class PlayerResult {
532 
533             public static final int UNSET = -1;
534             private final boolean configureSuccess;
535             private final boolean startSuccess;
536             private final boolean decodeSuccess;
537             private final long totalTime;
538 
PlayerResult( boolean configureSuccess, boolean startSuccess, boolean decodeSuccess, long totalTime)539             public PlayerResult(
540                     boolean configureSuccess, boolean startSuccess,
541                     boolean decodeSuccess, long totalTime) {
542                 this.configureSuccess = configureSuccess;
543                 this.startSuccess = startSuccess;
544                 this.decodeSuccess = decodeSuccess;
545                 this.totalTime = totalTime;
546             }
547 
PlayerResult(PlayerResult playerResult)548             public PlayerResult(PlayerResult playerResult) {
549                 this(playerResult.configureSuccess, playerResult.startSuccess,
550                         playerResult.decodeSuccess, playerResult.totalTime);
551             }
552 
PlayerResult()553             public PlayerResult() {
554                 // Fake PlayerResult.
555                 this(false, false, false, UNSET);
556             }
557 
failToStart()558             public static PlayerResult failToStart() {
559                 return new PlayerResult(true, false, false, UNSET);
560             }
561 
getFailureMessage()562             public String getFailureMessage() {
563                 if (!configureSuccess) {
564                     return "Failed to configure decoder.";
565                 } else if (!startSuccess) {
566                     return "Failed to start decoder.";
567                 } else if (!decodeSuccess) {
568                     return "Failed to decode the expected number of frames.";
569                 } else {
570                     return "Failed to finish decoding.";
571                 }
572             }
573 
isConfigureSuccess()574             public boolean isConfigureSuccess() {
575                 return configureSuccess;
576             }
577 
isSuccess()578             public boolean isSuccess() {
579                 return configureSuccess && startSuccess && decodeSuccess && getTotalTime() != UNSET;
580             }
581 
getTotalTime()582             public long getTotalTime() {
583                 return totalTime;
584             }
585 
586         }
587 
588     }
589 
    /* Utility class for collecting common test case functionality. */
    class TestHelper {

        private final String TAG =  TestHelper.class.getSimpleName();

        private final Context context;
        // Bound to the main looper: every view mutation below is posted to the UI thread.
        private final Handler handler;
        private final Activity activity;

        public TestHelper(Context context, Activity activity) {
            this.context = checkNotNull(context);
            this.handler = new Handler(Looper.getMainLooper());
            this.activity = activity;
        }

        /** Decodes the given raw image resource into a {@link Bitmap}. */
        public Bitmap generateBitmapFromImageResourceId(int resourceId) {
            return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId));
        }

        public Context getContext() {
            return context;
        }

        /** Toggles the activity between portrait and landscape, on the UI thread. */
        public void rotateOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    final int orientation = context.getResources().getConfiguration().orientation;
                    if (orientation == Configuration.ORIENTATION_PORTRAIT) {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                    } else {
                        activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    }
                }
            });
        }

        /** Clears any orientation request previously made (e.g. by rotateOrientation()). */
        public void unsetOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
                }
            });
        }

        /** Attaches the given view to the activity's layout, on the UI thread. */
        public void generateView(View view) {
            RelativeLayout relativeLayout =
                    (RelativeLayout) activity.findViewById(R.id.attach_view);
            ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
            handler.post(viewGenerator);
        }

        /** Detaches the given view from its parent, on the UI thread. */
        public void cleanUpView(View view) {
            ViewCleaner viewCleaner = new ViewCleaner(view);
            handler.post(viewCleaner);
        }

        /**
         * Runs the snapshot on the UI thread and blocks until the bitmap is ready or
         * slightly more than the snapshot's own timeout has elapsed.
         *
         * @return the captured bitmap, or null on interruption or timeout.
         */
        public Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
            handler.post(snapshot);
            synchronized (snapshot.getSyncObject()) {
                try {
                    // Wait a little past the snapshot's own timeout so its completion
                    // path has a chance to notify first.
                    snapshot.getSyncObject().wait(snapshot.SNAPSHOT_TIMEOUT_MS + 100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    Log.e(TAG, "Unable to finish generateBitmapFromVideoViewSnapshot().");
                    return null;
                }
            }
            // wait() also returns on timeout (or spuriously); isBitmapReady()
            // distinguishes a real capture from a timeout.
            if (!snapshot.isBitmapReady()) {
                Log.e(TAG, "Time out in generateBitmapFromVideoViewSnapshot().");
                return null;
            }
            return snapshot.getBitmap();
        }

        /** Runnable that sizes a view to VIEW_WIDTH x VIEW_HEIGHT and attaches it. */
        private class ViewGenerator implements Runnable {

            private final View view;
            private final RelativeLayout relativeLayout;

            public ViewGenerator(RelativeLayout relativeLayout, View view) {
                this.view = checkNotNull(view);
                this.relativeLayout = checkNotNull(relativeLayout);
            }

            @Override
            public void run() {
                // A view must be detached from any previous parent before re-attaching.
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                        VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
                view.setLayoutParams(params);
                relativeLayout.addView(view);
            }

        }

        /** Runnable that detaches a view from its parent, if it has one. */
        private class ViewCleaner implements Runnable {

            private final View view;

            public ViewCleaner(View view) {
                this.view = checkNotNull(view);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
            }

        }

    }
707 
708 }
709 
710 /* Factory for manipulating a {@link View}. */
711 abstract class VideoViewFactory {
712 
713     public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
714     public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
715     public static final int VIEW_WIDTH = 480;
716     public static final int VIEW_HEIGHT = 360;
717 
VideoViewFactory()718     public VideoViewFactory() {}
719 
release()720     public abstract void release();
721 
getName()722     public abstract String getName();
723 
createView(Context context)724     public abstract View createView(Context context);
725 
waitForViewIsAvailable()726     public void waitForViewIsAvailable() throws Exception {
727         waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
728     };
729 
waitForViewIsAvailable(long timeOutMs)730     public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;
731 
getSurface()732     public abstract Surface getSurface();
733 
getVideoViewSnapshot()734     public abstract VideoViewSnapshot getVideoViewSnapshot();
735 
hasLooper()736     public boolean hasLooper() {
737         return Looper.myLooper() != null;
738     }
739 
740 }
741 
742 /* Factory for building a {@link TextureView}. */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    // Guards the wait in waitForViewIsAvailable(); notified from
    // onSurfaceTextureAvailable() when the SurfaceTexture arrives.
    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    /** Creates a {@link TextureView} and registers this factory as its listener. */
    @Override
    public TextureView createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    // NOTE(review): constructs a new Surface on every call; confirm no caller
    // expects a cached instance or releases a previously returned one.
    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    /**
     * Blocks until the TextureView reports isAvailable() or {@code timeOutMs} elapses.
     *
     * @throws InterruptedException if interrupted, or if the view never becomes
     *         available within the timeout
     */
    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        // Re-check availability on each wakeup; waits at most VIEW_WAITTIME_MS per pass.
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!textureView.isAvailable()) {
            throw new InterruptedException("Taking too long to attach a TextureView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        // Wake up waitForViewIsAvailable().
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        // Returning false leaves releasing the SurfaceTexture to the caller.
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}
821 
822 /**
823  * Factory for building a {@link SurfaceView}
824  */
@TargetApi(24)
class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {

    private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "SurfaceView";
    // Guards the wait in waitForViewIsAvailable(); notified from surfaceCreated().
    private final Object syncToken = new Object();

    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    public SurfaceViewFactory() {}

    @Override
    public void release() {
        surfaceView = null;
        surfaceHolder = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    /** Creates a {@link SurfaceView} and registers this factory for surface callbacks. */
    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        // Ensure the current thread has a Looper before creating the view.
        if (!super.hasLooper()) {
            Looper.prepare();
        }
        surfaceView = new SurfaceView(context);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        return surfaceView;
    }

    /**
     * Blocks until the surface becomes valid or {@code timeOutMs} elapses.
     *
     * @throws InterruptedException if interrupted, or if the surface never
     *         becomes valid within the timeout
     */
    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        // Re-check validity on each wakeup; waits at most VIEW_WAITTIME_MS per pass.
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!getSurface().isValid()) {
            throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    /** Returns the holder's surface, or {@code null} before createView() has run. */
    @Override
    public Surface getSurface() {
        return surfaceHolder == null ? null : surfaceHolder.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // Wake up waitForViewIsAvailable().
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}

}
903 
904 /**
905  * Factory for building EGL and GLES that could render to GLSurfaceView.
906  * {@link GLSurfaceView} {@link EGL10} {@link GLES20}.
907  */
908 @TargetApi(16)
909 class GLSurfaceViewFactory extends VideoViewFactory {
910 
911     private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
912     private static final String NAME = "GLSurfaceView";
913 
914     private final Object surfaceSyncToken = new Object();
915 
916     private GLSurfaceViewThread glSurfaceViewThread;
917     private boolean byteBufferIsReady = false;
918 
GLSurfaceViewFactory()919     public GLSurfaceViewFactory() {}
920 
921     @Override
release()922     public void release() {
923         glSurfaceViewThread.release();
924         glSurfaceViewThread = null;
925     }
926 
927     @Override
getName()928     public String getName() {
929         return NAME;
930     }
931 
932     @Override
createView(Context context)933     public View createView(Context context) {
934         Log.i(TAG, "Creating a " + NAME);
935         // Do all GL rendering in the GL thread.
936         glSurfaceViewThread = new GLSurfaceViewThread();
937         glSurfaceViewThread.start();
938         // No necessary view to display, return null.
939         return null;
940     }
941 
942     @Override
waitForViewIsAvailable(long timeOutMs)943     public void waitForViewIsAvailable(long timeOutMs) throws Exception {
944         final long start = SystemClock.elapsedRealtime();
945         while (SystemClock.elapsedRealtime() - start < timeOutMs
946                 && glSurfaceViewThread.getSurface() == null) {
947             synchronized (surfaceSyncToken) {
948                 try {
949                     surfaceSyncToken.wait(VIEW_WAITTIME_MS);
950                 } catch (InterruptedException e) {
951                     Log.e(TAG, "Exception occurred when waiting for the surface from"
952                             + " GLSurfaceView to become available.", e);
953                     throw new InterruptedException(e.getMessage());
954                 }
955             }
956         }
957         if (glSurfaceViewThread.getSurface() == null) {
958             throw new InterruptedException("Taking too long for the surface from"
959                     + " GLSurfaceView to become available.");
960         }
961         Log.i(TAG, NAME + " is available.");
962     }
963 
964     @Override
getSurface()965     public Surface getSurface() {
966         return glSurfaceViewThread.getSurface();
967     }
968 
969     @Override
getVideoViewSnapshot()970     public VideoViewSnapshot getVideoViewSnapshot() {
971         return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
972     }
973 
byteBufferIsReady()974     public boolean byteBufferIsReady() {
975         return byteBufferIsReady;
976     }
977 
getByteBuffer()978     public ByteBuffer getByteBuffer() {
979         return glSurfaceViewThread.getByteBuffer();
980     }
981 
getSyncToken()982     public Object getSyncToken() {
983         return surfaceSyncToken;
984     }
985 
986     /* Does all GL operations. */
987     private class GLSurfaceViewThread extends Thread
988             implements SurfaceTexture.OnFrameAvailableListener {
989 
990         private static final int FLOAT_SIZE_BYTES = 4;
991         private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
992         private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
993         private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
994         private final CountDownLatch mDone = new CountDownLatch(1);
995         private FloatBuffer triangleVertices;
996         private float[] textureTransform = new float[16];
997 
998         private float[] triangleVerticesData = {
999             // X, Y, Z, U, V
1000             -1f, -1f,  0f,  0f,  1f,
1001              1f, -1f,  0f,  1f,  1f,
1002             -1f,  1f,  0f,  0f,  0f,
1003              1f,  1f,  0f,  1f,  0f,
1004         };
1005         // Make the top-left corner corresponds to texture coordinate
1006         // (0, 0). This complies with the transformation matrix obtained from
1007         // SurfaceTexture.getTransformMatrix.
1008 
1009         private static final String VERTEX_SHADER =
1010                 "attribute vec4 aPosition;\n"
1011                 + "attribute vec4 aTextureCoord;\n"
1012                 + "uniform mat4 uTextureTransform;\n"
1013                 + "varying vec2 vTextureCoord;\n"
1014                 + "void main() {\n"
1015                 + "    gl_Position = aPosition;\n"
1016                 + "    vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
1017                 + "}\n";
1018 
1019         private static final String FRAGMENT_SHADER =
1020                 "#extension GL_OES_EGL_image_external : require\n"
1021                 + "precision mediump float;\n"      // highp here doesn't seem to matter
1022                 + "varying vec2 vTextureCoord;\n"
1023                 + "uniform samplerExternalOES sTexture;\n"
1024                 + "void main() {\n"
1025                 + "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
1026                 + "}\n";
1027 
1028         private int glProgram;
1029         private int textureID = -1;
1030         private int aPositionHandle;
1031         private int aTextureHandle;
1032         private int uTextureTransformHandle;
1033         private EGLDisplay eglDisplay = null;
1034         private EGLContext eglContext = null;
1035         private EGLSurface eglSurface = null;
1036         private EGL10 egl10;
1037         private Surface surface = null;
1038         private SurfaceTexture surfaceTexture;
1039         private ByteBuffer byteBuffer;
1040         private Looper looper;
1041 
GLSurfaceViewThread()1042         public GLSurfaceViewThread() {}
1043 
1044         @Override
run()1045         public void run() {
1046             Looper.prepare();
1047             looper = Looper.myLooper();
1048             triangleVertices = ByteBuffer
1049                     .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
1050                     .order(ByteOrder.nativeOrder()).asFloatBuffer();
1051             triangleVertices.put(triangleVerticesData).position(0);
1052 
1053             eglSetup();
1054             makeCurrent();
1055             eglSurfaceCreated();
1056 
1057             surfaceTexture = new SurfaceTexture(getTextureId());
1058             surfaceTexture.setOnFrameAvailableListener(this);
1059             surface = new Surface(surfaceTexture);
1060             synchronized (surfaceSyncToken) {
1061                 surfaceSyncToken.notify();
1062             }
1063             // Store pixels from surface
1064             byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
1065             byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
1066             Looper.loop();
1067             surface.release();
1068             surfaceTexture.release();
1069             byteBufferIsReady = false;
1070             byteBuffer =  null;
1071             egl10.eglMakeCurrent(eglDisplay,
1072                 EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
1073             egl10.eglDestroySurface(eglDisplay, eglSurface);
1074             egl10.eglDestroyContext(eglDisplay, eglContext);
1075             //TODO: uncomment following line after fixing crash in GL driver libGLESv2_adreno.so
1076             //TODO: see b/123755902
1077             //egl10.eglTerminate(eglDisplay);
1078             eglDisplay = EGL10.EGL_NO_DISPLAY;
1079             eglContext = EGL10.EGL_NO_CONTEXT;
1080             eglSurface = EGL10.EGL_NO_SURFACE;
1081             mDone.countDown();
1082         }
1083 
1084         @Override
onFrameAvailable(SurfaceTexture st)1085         public void onFrameAvailable(SurfaceTexture st) {
1086             checkGlError("before updateTexImage");
1087             surfaceTexture.updateTexImage();
1088             st.getTransformMatrix(textureTransform);
1089             drawFrame();
1090             saveFrame();
1091         }
1092 
1093         /* Prepares EGL to use GLES 2.0 context and a surface that supports pbuffer. */
eglSetup()1094         public void eglSetup() {
1095             egl10 = (EGL10) EGLContext.getEGL();
1096             eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
1097             if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
1098                 throw new RuntimeException("unable to get egl10 display");
1099             }
1100             int[] version = new int[2];
1101             if (!egl10.eglInitialize(eglDisplay, version)) {
1102                 eglDisplay = null;
1103                 throw new RuntimeException("unable to initialize egl10");
1104             }
1105             // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
1106             int[] configAttribs = {
1107                 EGL10.EGL_RED_SIZE, 8,
1108                 EGL10.EGL_GREEN_SIZE, 8,
1109                 EGL10.EGL_BLUE_SIZE, 8,
1110                 EGL10.EGL_ALPHA_SIZE, 8,
1111                 EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
1112                 EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
1113                 EGL10.EGL_NONE
1114             };
1115             EGLConfig[] configs = new EGLConfig[1];
1116             int[] numConfigs = new int[1];
1117             if (!egl10.eglChooseConfig(
1118                     eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
1119                 throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
1120             }
1121             // Configure EGL context for OpenGL ES 2.0.
1122             int[] contextAttribs = {
1123                 EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
1124                 EGL10.EGL_NONE
1125             };
1126             eglContext = egl10.eglCreateContext(
1127                     eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
1128             checkEglError("eglCreateContext");
1129             if (eglContext == null) {
1130                 throw new RuntimeException("null context");
1131             }
1132             // Create a pbuffer surface.
1133             int[] surfaceAttribs = {
1134                 EGL10.EGL_WIDTH, VIEW_WIDTH,
1135                 EGL10.EGL_HEIGHT, VIEW_HEIGHT,
1136                 EGL10.EGL_NONE
1137             };
1138             eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
1139             checkEglError("eglCreatePbufferSurface");
1140             if (eglSurface == null) {
1141                 throw new RuntimeException("surface was null");
1142             }
1143         }
1144 
release()1145         public void release() {
1146             looper.quit();
1147             try{
1148                 mDone.await();
1149             }
1150             catch(InterruptedException e) {
1151                 Log.e(TAG, "Interrupted waiting in release");
1152             }
1153         }
1154 
1155         /* Makes our EGL context and surface current. */
makeCurrent()1156         public void makeCurrent() {
1157             if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
1158                 throw new RuntimeException("eglMakeCurrent failed");
1159             }
1160             checkEglError("eglMakeCurrent");
1161         }
1162 
1163         /* Call this after the EGL Surface is created and made current. */
eglSurfaceCreated()1164         public void eglSurfaceCreated() {
1165             glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
1166             if (glProgram == 0) {
1167                 throw new RuntimeException("failed creating program");
1168             }
1169             aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
1170             checkLocation(aPositionHandle, "aPosition");
1171             aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
1172             checkLocation(aTextureHandle, "aTextureCoord");
1173             uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
1174             checkLocation(uTextureTransformHandle, "uTextureTransform");
1175 
1176             int[] textures = new int[1];
1177             GLES20.glGenTextures(1, textures, 0);
1178             checkGlError("glGenTextures");
1179             textureID = textures[0];
1180             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
1181             checkGlError("glBindTexture");
1182 
1183             GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
1184                     GLES20.GL_LINEAR);
1185             GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
1186                     GLES20.GL_LINEAR);
1187             GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
1188                     GLES20.GL_CLAMP_TO_EDGE);
1189             GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
1190                     GLES20.GL_CLAMP_TO_EDGE);
1191             checkGlError("glTexParameter");
1192         }
1193 
drawFrame()1194         public void drawFrame() {
1195             GLES20.glUseProgram(glProgram);
1196             checkGlError("glUseProgram");
1197             GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
1198             checkGlError("glActiveTexture");
1199             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
1200             checkGlError("glBindTexture");
1201 
1202             triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
1203             GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
1204                     TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
1205             checkGlError("glVertexAttribPointer aPositionHandle");
1206             GLES20.glEnableVertexAttribArray(aPositionHandle);
1207             checkGlError("glEnableVertexAttribArray aPositionHandle");
1208 
1209             triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
1210             GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
1211                     TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
1212             checkGlError("glVertexAttribPointer aTextureHandle");
1213             GLES20.glEnableVertexAttribArray(aTextureHandle);
1214             checkGlError("glEnableVertexAttribArray aTextureHandle");
1215 
1216             GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false, textureTransform, 0);
1217             checkGlError("glUniformMatrix uTextureTransformHandle");
1218 
1219             GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
1220             checkGlError("glDrawArrays");
1221             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
1222         }
1223 
1224         /* Reads the pixels to a ByteBuffer. */
saveFrame()1225         public void saveFrame() {
1226             synchronized (surfaceSyncToken) {
1227                 byteBufferIsReady = false;
1228                 byteBuffer.clear();
1229                 GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
1230                         GLES20.GL_UNSIGNED_BYTE, byteBuffer);
1231                 byteBufferIsReady = true;
1232             }
1233         }
1234 
getTextureId()1235         public int getTextureId() {
1236             return textureID;
1237         }
1238 
getSurface()1239         public Surface getSurface() {
1240             return surface;
1241         }
1242 
getByteBuffer()1243         public ByteBuffer getByteBuffer() {
1244             return byteBuffer;
1245         }
1246 
loadShader(int shaderType, String source)1247         private int loadShader(int shaderType, String source) {
1248             int shader = GLES20.glCreateShader(shaderType);
1249             checkGlError("glCreateShader type=" + shaderType);
1250             GLES20.glShaderSource(shader, source);
1251             GLES20.glCompileShader(shader);
1252             int[] compiled = new int[1];
1253             GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
1254 
1255             if (compiled[0] == 0) {
1256                 Log.e(TAG, "Could not compile shader " + shaderType + ":");
1257                 Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
1258                 GLES20.glDeleteShader(shader);
1259                 shader = 0;
1260             }
1261             return shader;
1262         }
1263 
createProgram(String vertexSource, String fragmentSource)1264         private int createProgram(String vertexSource, String fragmentSource) {
1265             int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
1266             if (vertexShader == 0) {
1267                 return 0;
1268             }
1269             int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
1270             if (pixelShader == 0) {
1271                 return 0;
1272             }
1273             int program = GLES20.glCreateProgram();
1274             if (program == 0) {
1275                 Log.e(TAG, "Could not create program");
1276             }
1277             GLES20.glAttachShader(program, vertexShader);
1278             checkGlError("glAttachShader");
1279             GLES20.glAttachShader(program, pixelShader);
1280             checkGlError("glAttachShader");
1281             GLES20.glLinkProgram(program);
1282             int[] linkStatus = new int[1];
1283             GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
1284 
1285             if (linkStatus[0] != GLES20.GL_TRUE) {
1286                 Log.e(TAG, "Could not link program: ");
1287                 Log.e(TAG, GLES20.glGetProgramInfoLog(program));
1288                 GLES20.glDeleteProgram(program);
1289                 program = 0;
1290             }
1291             return program;
1292         }
1293 
checkEglError(String msg)1294         private void checkEglError(String msg) {
1295             int error;
1296             if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
1297                 throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
1298             }
1299         }
1300 
checkGlError(String op)1301         public void checkGlError(String op) {
1302             int error;
1303             if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
1304                 Log.e(TAG, op + ": glError " + error);
1305                 throw new RuntimeException(op + ": glError " + error);
1306             }
1307         }
1308 
checkLocation(int location, String label)1309         public void checkLocation(int location, String label) {
1310             if (location < 0) {
1311                 throw new RuntimeException("Unable to locate '" + label + "' in program");
1312             }
1313         }
1314     }
1315 
1316 }
1317 
/* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    // Maximum time a snapshot may take before callers give up waiting.
    public static final long SNAPSHOT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(30);
    // Polling interval used by implementations that retry (see SurfaceViewSnapshot).
    public static final long SLEEP_TIME_MS = 30;
    // Shared lock: implementations notify this once the capture attempt finishes.
    public static final Object SYNC_TOKEN = new Object();

    /** Returns the captured bitmap, or null if capture failed or has not completed. */
    public abstract Bitmap getBitmap();

    /** Returns true once a bitmap has been successfully captured. */
    public abstract boolean isBitmapReady();

    /** Returns the object that is notified when the snapshot completes. */
    public abstract Object getSyncObject();

}
1332 
1333 /* Runnable to get a bitmap from a texture view on the UI thread via a handler.
1334  * This class is to be used together with
1335  * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}
1336  */
1337 class TextureViewSnapshot extends VideoViewSnapshot {
1338 
1339     private final TextureView tv;
1340     private Bitmap bitmap = null;
1341 
TextureViewSnapshot(TextureView tv)1342     public TextureViewSnapshot(TextureView tv) {
1343         this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
1344     }
1345 
1346     @Override
run()1347     public void run() {
1348         bitmap = null;
1349         bitmap = tv.getBitmap();
1350         synchronized (SYNC_TOKEN) {
1351             SYNC_TOKEN.notify();
1352         }
1353     }
1354 
1355     @Override
getBitmap()1356     public Bitmap getBitmap() {
1357         return bitmap;
1358     }
1359 
1360     @Override
isBitmapReady()1361     public boolean isBitmapReady() {
1362         return bitmap != null;
1363     }
1364 
1365     @Override
getSyncObject()1366     public Object getSyncObject() {
1367         return SYNC_TOKEN;
1368     }
1369 
1370 }
1371 
1372 /**
1373  * Method to get bitmap of a {@link SurfaceView}.
1374  * Note that PixelCopy does not have to be called in a runnable.
1375  * This class is to be used together with
1376  * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}
1377  */
class SurfaceViewSnapshot extends VideoViewSnapshot  {

    private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
    private static final int PIXELCOPY_TIMEOUT_MS = 1000;
    // Sentinel: no PixelCopy result has been received yet.
    private static final int INITIAL_STATE = -1;

    private final SurfaceView surfaceView;
    private final int width;
    private final int height;

    private Bitmap bitmap;
    private int copyResult;

    public SurfaceViewSnapshot(SurfaceView surfaceView, int width, int height) {
        this.surfaceView = surfaceView;
        this.width = width;
        this.height = height;
        this.copyResult = INITIAL_STATE;
        this.bitmap = null;
    }

    /**
     * Repeatedly requests a PixelCopy of the SurfaceView into a fresh bitmap,
     * retrying every SLEEP_TIME_MS until success or SNAPSHOT_TIMEOUT_MS elapses,
     * then notifies SYNC_TOKEN regardless of outcome.
     */
    @Override
    public void run() {
        final long start = SystemClock.elapsedRealtime();
        copyResult = INITIAL_STATE;
        final SynchronousPixelCopy copyHelper = new SynchronousPixelCopy();
        bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
        try {
            // Wait for PixelCopy to finish.
            while ((copyResult = copyHelper.request(surfaceView, bitmap)) != PixelCopy.SUCCESS
                    && (SystemClock.elapsedRealtime() - start) < SNAPSHOT_TIMEOUT_MS) {
                Thread.sleep(SLEEP_TIME_MS);
            }
        } catch (InterruptedException e) {
            Log.e(TAG, "Pixel Copy is stopped/interrupted before it finishes.", e);
            bitmap = null;
        } finally {
            // Always release the helper thread and wake the waiter, even on
            // failure, so the caller can fail fast instead of timing out.
            copyHelper.release();
            synchronized (SYNC_TOKEN) {
                SYNC_TOKEN.notify();
            }
        }
    }

    @Override
    public Bitmap getBitmap() {
        return bitmap;
    }

    @Override
    public boolean isBitmapReady() {
        return bitmap != null && copyResult == PixelCopy.SUCCESS;
    }

    @Override
    public Object getSyncObject() {
        return SYNC_TOKEN;
    }

    /* Turns the asynchronous PixelCopy API into a blocking call on a helper thread. */
    private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {

        private final Handler handler;
        private final HandlerThread thread;

        // Last status delivered by onPixelCopyFinished(); guarded by `this`.
        private int status = INITIAL_STATE;

        public SynchronousPixelCopy() {
            this.thread = new HandlerThread("PixelCopyHelper");
            thread.start();
            this.handler = new Handler(thread.getLooper());
        }

        public void release() {
            if (thread.isAlive()) {
                thread.quit();
            }
        }

        /**
         * Issues a PixelCopy request and blocks up to PIXELCOPY_TIMEOUT_MS for
         * its result; returns -1 on error or if no result arrived in time.
         */
        public int request(SurfaceView source, Bitmap dest) {
            synchronized (this) {
                try {
                    PixelCopy.request(source, dest, this, handler);
                    return getResultLocked();
                } catch (Exception e) {
                    Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
                    return -1;
                }
            }
        }

        private int getResultLocked() {
            try {
                this.wait(PIXELCOPY_TIMEOUT_MS);
            } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ }
            return status;
        }

        @Override
        public void onPixelCopyFinished(int copyResult) {
            // Runs on the helper thread; wakes the waiter in getResultLocked().
            synchronized (this) {
                status = copyResult;
                this.notify();
            }
        }

    }

}
1486 
1487 /**
1488  * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
1489  * Note, because of how the bitmap is captured in GLSurfaceView,
1490  * this method does not have to be a runnable.
1491   * This class is to be used together with
1492  * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}
1493  */
1494 class GLSurfaceViewSnapshot extends VideoViewSnapshot {
1495 
1496     private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();
1497 
1498     private final GLSurfaceViewFactory glSurfaceViewFactory;
1499     private final int width;
1500     private final int height;
1501 
1502     private Bitmap bitmap = null;
1503     private boolean bitmapIsReady = false;
1504 
GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height)1505     public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
1506         this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
1507         this.width = width;
1508         this.height = height;
1509     }
1510 
1511     @Override
run()1512     public void run() {
1513         bitmapIsReady = false;
1514         bitmap = null;
1515         try {
1516             waitForByteBuffer();
1517         } catch (InterruptedException exception) {
1518             Log.e(TAG, exception.getMessage());
1519             bitmap = null;
1520             notifyObject();
1521             return;
1522         }
1523         try {
1524             final ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
1525             bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
1526 
1527             final Object syncToken = glSurfaceViewFactory.getSyncToken();
1528             synchronized (syncToken) {
1529                 byteBuffer.rewind();
1530                 bitmap.copyPixelsFromBuffer(byteBuffer);
1531                 bitmapIsReady = true;
1532                 byteBuffer.clear();
1533             }
1534         } catch (NullPointerException exception) {
1535             Log.e(TAG, "glSurfaceViewFactory or byteBuffer may have been released", exception);
1536             bitmap = null;
1537         } finally {
1538             notifyObject();
1539         }
1540     }
1541 
1542     @Override
getBitmap()1543     public Bitmap getBitmap() {
1544         return bitmap;
1545     }
1546 
1547     @Override
isBitmapReady()1548     public boolean isBitmapReady() {
1549         return bitmapIsReady;
1550     }
1551 
1552     @Override
getSyncObject()1553     public Object getSyncObject() {
1554         return SYNC_TOKEN;
1555     }
1556 
notifyObject()1557     private void notifyObject() {
1558         synchronized (SYNC_TOKEN) {
1559             SYNC_TOKEN.notify();
1560         }
1561     }
1562 
waitForByteBuffer()1563     private void waitForByteBuffer() throws InterruptedException {
1564         // Wait for byte buffer to be ready.
1565         final long start = SystemClock.elapsedRealtime();
1566         while (SystemClock.elapsedRealtime() - start < SNAPSHOT_TIMEOUT_MS) {
1567             if (glSurfaceViewFactory.byteBufferIsReady()) {
1568                 return;
1569             }
1570             Thread.sleep(SLEEP_TIME_MS);
1571         }
1572         throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
1573     }
1574 
1575 }
1576 
1577 /* Stores information of a video file. */
1578 class VideoFormat {
1579 
1580     public static final String STRING_UNSET = "UNSET";
1581     public static final int INT_UNSET = -1;
1582 
1583     private final String filename;
1584 
1585     private String mimeType = STRING_UNSET;
1586     private int width = INT_UNSET;
1587     private int height = INT_UNSET;
1588     private int maxWidth = INT_UNSET;
1589     private int maxHeight = INT_UNSET;
1590     private FilenameParser filenameParser;
1591 
VideoFormat(String filename)1592     public VideoFormat(String filename) {
1593         this.filename = filename;
1594     }
1595 
VideoFormat(VideoFormat videoFormat)1596     public VideoFormat(VideoFormat videoFormat) {
1597         this(videoFormat.filename);
1598     }
1599 
getParsedName()1600     private FilenameParser getParsedName() {
1601         if (filenameParser == null) {
1602             filenameParser = new FilenameParser(filename);
1603         }
1604         return filenameParser;
1605     }
1606 
getMediaFormat()1607     public String getMediaFormat() {
1608         return "video";
1609     }
1610 
setMimeType(String mimeType)1611     public void setMimeType(String mimeType) {
1612         this.mimeType = mimeType;
1613     }
1614 
getMimeType()1615     public String getMimeType() {
1616         if (mimeType.equals(STRING_UNSET)) {
1617             return getParsedName().getMimeType();
1618         }
1619         return mimeType;
1620     }
1621 
setWidth(int width)1622     public void setWidth(int width) {
1623         this.width = width;
1624     }
1625 
setMaxWidth(int maxWidth)1626     public void setMaxWidth(int maxWidth) {
1627         this.maxWidth = maxWidth;
1628     }
1629 
getWidth()1630     public int getWidth() {
1631         if (width == INT_UNSET) {
1632             return getParsedName().getWidth();
1633         }
1634         return width;
1635     }
1636 
getMaxWidth()1637     public int getMaxWidth() {
1638         return maxWidth;
1639     }
1640 
getOriginalWidth()1641     public int getOriginalWidth() {
1642         return getParsedName().getWidth();
1643     }
1644 
setHeight(int height)1645     public void setHeight(int height) {
1646         this.height = height;
1647     }
1648 
setMaxHeight(int maxHeight)1649     public void setMaxHeight(int maxHeight) {
1650         this.maxHeight = maxHeight;
1651     }
1652 
getHeight()1653     public int getHeight() {
1654         if (height == INT_UNSET) {
1655             return getParsedName().getHeight();
1656         }
1657         return height;
1658     }
1659 
getMaxHeight()1660     public int getMaxHeight() {
1661         return maxHeight;
1662     }
1663 
getOriginalHeight()1664     public int getOriginalHeight() {
1665         return getParsedName().getHeight();
1666     }
1667 
isAbrEnabled()1668     public boolean isAbrEnabled() {
1669         return false;
1670     }
1671 
getOriginalSize()1672     public String getOriginalSize() {
1673         if (width == INT_UNSET || height == INT_UNSET) {
1674             return getParsedName().getSize();
1675         }
1676         return width + "x" + height;
1677     }
1678 
getDescription()1679     public String getDescription() {
1680         return getParsedName().getDescription();
1681     }
1682 
toPrettyString()1683     public String toPrettyString() {
1684         return getParsedName().toPrettyString();
1685     }
1686 
getAssetFileDescriptor()1687     public AssetFileDescriptor getAssetFileDescriptor() throws FileNotFoundException {
1688         File inpFile = new File(WorkDir.getMediaDirString() + "assets/decode_accuracy/" + filename);
1689         ParcelFileDescriptor parcelFD =
1690                 ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY);
1691         return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize());
1692     }
1693 
1694 }
1695 
1696 /* File parser for filenames with format of {description}-{mimeType}_{size}_{framerate}.{format} */
1697 class FilenameParser {
1698 
1699     static final String VP9 = "vp9";
1700     static final String H264 = "h264";
1701 
1702     private final String filename;
1703 
1704     private String codec = VideoFormat.STRING_UNSET;
1705     private String description = VideoFormat.STRING_UNSET;
1706     private int width = VideoFormat.INT_UNSET;
1707     private int height = VideoFormat.INT_UNSET;
1708 
FilenameParser(String filename)1709     FilenameParser(String filename) {
1710         this.filename = filename;
1711         parseFilename(filename);
1712     }
1713 
getCodec()1714     public String getCodec() {
1715         return codec;
1716     }
1717 
getMimeType()1718     public String getMimeType() {
1719         switch (codec) {
1720             case H264:
1721                 return MimeTypes.VIDEO_H264;
1722             case VP9:
1723                 return MimeTypes.VIDEO_VP9;
1724             default:
1725                 return null;
1726         }
1727     }
1728 
getWidth()1729     public int getWidth() {
1730         return width;
1731     }
1732 
getHeight()1733     public int getHeight() {
1734         return height;
1735     }
1736 
getSize()1737     public String getSize() {
1738         return width + "x" + height;
1739     }
1740 
getDescription()1741     public String getDescription() {
1742         return description;
1743     }
1744 
toPrettyString()1745     String toPrettyString() {
1746         if (codec != null) {
1747             return codec.toUpperCase() + " " + getSize();
1748         }
1749         return filename;
1750     }
1751 
parseFilename(String filename)1752     private void parseFilename(String filename) {
1753         final String descriptionDelimiter = "-";
1754         final String infoDelimiter = "_";
1755         final String sizeDelimiter = "x";
1756         try {
1757             this.description = filename.split(descriptionDelimiter)[0];
1758             final String[] fileInfo = filename.split(descriptionDelimiter)[1].split(infoDelimiter);
1759             this.codec = fileInfo[0];
1760             this.width = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[0]);
1761             this.height = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[1]);
1762         } catch (Exception exception) { /* Filename format does not match. */ }
1763     }
1764 
1765 }
1766 
1767 /**
1768  * Compares bitmaps to determine if they are similar.
1769  *
1770  * <p>To determine greatest pixel difference we transform each pixel into the
1771  * CIE L*a*b* color space. The euclidean distance formula is used to determine pixel differences.
1772  */
1773 class BitmapCompare {
1774     private static final String TAG = "BitmapCompare";
1775 
1776     private static final int RED = 0;
1777     private static final int GREEN = 1;
1778     private static final int BLUE = 2;
1779     private static final int X = 0;
1780     private static final int Y = 1;
1781     private static final int Z = 2;
1782 
BitmapCompare()1783     private BitmapCompare() {}
1784 
1785     /**
1786      * Produces greatest pixel between two bitmaps. Used to determine bitmap similarity.
1787      *
1788      * This simplified variant does not ignore any edge pixels.
1789      *
1790      * @param bitmap1 A bitmap to compare to bitmap2.
1791      * @param bitmap2 A bitmap to compare to bitmap1.
1792      * @return A {@link Difference} with an integer describing the greatest pixel difference,
1793      *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
1794      *     {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate where it was first found.
1795      */
1796     @TargetApi(12)
computeDifference(Bitmap bitmap1, Bitmap bitmap2)1797     private static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
1798         return computeDifference(bitmap1, bitmap2, 0);
1799     }
1800 
1801     /**
1802      * Produces greatest pixel between two bitmaps. Used to determine bitmap similarity.
1803      *
1804      * @param bitmap1 A bitmap to compare to bitmap2.
1805      * @param bitmap2 A bitmap to compare to bitmap1.
1806      * @param ignorePixels number of pixels at each edge where we ignore the scoring. This
1807      *     is used for mainline and older base systems to bypass an edge behavior in the
1808      *     GPU code on those systems.
1809      * @return A {@link Difference} with an integer describing the greatest pixel difference,
1810      *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
1811      *     {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate where it was
1812      *     first found.
1813      */
1814     @TargetApi(12)
computeDifference(Bitmap bitmap1, Bitmap bitmap2, int ignorePixels)1815     private static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2, int ignorePixels) {
1816         Log.i(TAG, "ignorePixels = " + ignorePixels);
1817         if (bitmap1 == null || bitmap2 == null) {
1818             return new Difference(Integer.MAX_VALUE);
1819         }
1820         if (bitmap1.equals(bitmap2) || bitmap1.sameAs(bitmap2)) {
1821             return new Difference(0);
1822         }
1823         if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) {
1824             return new Difference(Integer.MAX_VALUE);
1825         }
1826         // Convert all pixels to CIE L*a*b* color space so we can do a direct color comparison using
1827         // euclidean distance formula.
1828         final double[][] pixels1 = convertRgbToCieLab(bitmap1);
1829         final double[][] pixels2 = convertRgbToCieLab(bitmap2);
1830         int greatestDifference = -1;    // forces a legal index later...
1831         int greatestDifferenceIndex = -1;
1832         for (int i = 0; i < pixels1.length; i++) {
1833             // pixels within 'ignorePixels' of the edge are to be ignored for
1834             // scoring purposes.
1835             int x = i % bitmap1.getWidth();
1836             int y = i / bitmap1.getWidth();
1837             if (x < ignorePixels || x >= bitmap1.getWidth() - ignorePixels
1838                     || y < ignorePixels || y >= bitmap1.getHeight() - ignorePixels) {
1839                 continue;
1840             }
1841 
1842             final int difference = euclideanDistance(pixels1[i], pixels2[i]);
1843 
1844             if (difference > greatestDifference) {
1845                 greatestDifference = difference;
1846                 greatestDifferenceIndex = i;
1847             }
1848         }
1849 
1850         // huge ignorePixels values can get here without checking any pixels
1851         if (greatestDifferenceIndex == -1) {
1852             greatestDifferenceIndex = 0;
1853             greatestDifference = 0;
1854         }
1855         return new Difference(greatestDifference, Pair.create(
1856             greatestDifferenceIndex % bitmap1.getWidth(),
1857             greatestDifferenceIndex / bitmap1.getWidth()));
1858     }
1859 
1860     @SuppressLint("UseSparseArrays")
convertRgbToCieLab(Bitmap bitmap)1861     private static double[][] convertRgbToCieLab(Bitmap bitmap) {
1862         final HashMap<Integer, double[]> pixelTransformCache = new HashMap<>();
1863         final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
1864         final int[] pixels = new int[bitmap.getHeight() * bitmap.getWidth()];
1865         bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
1866         for (int i = 0; i < pixels.length; i++) {
1867             final double[] transformedColor = pixelTransformCache.get(pixels[i]);
1868             if (transformedColor != null) {
1869                 result[i] = transformedColor;
1870             } else {
1871                 result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
1872                 pixelTransformCache.put(pixels[i], result[i]);
1873             }
1874         }
1875         return result;
1876     }
1877 
1878     /**
1879      * Conversion from RGB to XYZ based algorithm as defined by:
1880      * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
1881      *
1882      * <p><pre>{@code
1883      *   var_R = ( R / 255 )        //R from 0 to 255
1884      *   var_G = ( G / 255 )        //G from 0 to 255
1885      *   var_B = ( B / 255 )        //B from 0 to 255
1886      *
1887      *   if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
1888      *   else                   var_R = var_R / 12.92
1889      *   if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
1890      *   else                   var_G = var_G / 12.92
1891      *   if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
1892      *   else                   var_B = var_B / 12.92
1893      *
1894      *   var_R = var_R * 100
1895      *   var_G = var_G * 100
1896      *   var_B = var_B * 100
1897      *
1898      *   // Observer. = 2°, Illuminant = D65
1899      *   X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
1900      *   Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
1901      *   Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
1902      * }</pre>
1903      *
1904      * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
1905      * @return An array of doubles where each value is a component of the XYZ color space.
1906      */
convertRgbToXyz(int rgbColor)1907     private static double[] convertRgbToXyz(int rgbColor) {
1908         final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
1909         for (int i = 0; i < comp.length; i++) {
1910             comp[i] /= 255.0;
1911             if (comp[i] > 0.04045) {
1912                 comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
1913             } else {
1914                 comp[i] /= 12.92;
1915             }
1916             comp[i] *= 100;
1917         }
1918         final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
1919         final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
1920         final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
1921         return new double[] {x, y, z};
1922     }
1923 
1924     /**
1925      * Conversion from XYZ to CIE-L*a*b* based algorithm as defined by:
1926      * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
1927      *
1928      * <p><pre>
1929      * {@code
1930      *   var_X = X / ref_X          //ref_X =  95.047   Observer= 2°, Illuminant= D65
1931      *   var_Y = Y / ref_Y          //ref_Y = 100.000
1932      *   var_Z = Z / ref_Z          //ref_Z = 108.883
1933      *
1934      *   if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
1935      *   else                    var_X = ( 7.787 * var_X ) + ( 16 / 116 )
1936      *   if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
1937      *   else                    var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
1938      *   if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
1939      *   else                    var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
1940      *
1941      *   CIE-L* = ( 116 * var_Y ) - 16
1942      *   CIE-a* = 500 * ( var_X - var_Y )
1943      *   CIE-b* = 200 * ( var_Y - var_Z )
1944      * }
1945      * </pre>
1946      *
1947      * @param comp An array of doubles where each value is a component of the XYZ color space.
1948      * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
1949      */
convertXyzToCieLab(double[] comp)1950     private static double[] convertXyzToCieLab(double[] comp) {
1951         comp[X] /= 95.047;
1952         comp[Y] /= 100.0;
1953         comp[Z] /= 108.883;
1954         for (int i = 0; i < comp.length; i++) {
1955             if (comp[i] > 0.008856) {
1956                 comp[i] = Math.pow(comp[i], (1.0 / 3.0));
1957             } else {
1958                 comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
1959             }
1960         }
1961         final double l = (116 * comp[Y]) - 16;
1962         final double a = 500 * (comp[X] - comp[Y]);
1963         final double b = 200 * (comp[Y] - comp[Z]);
1964         return new double[] {l, a, b};
1965     }
1966 
euclideanDistance(double[] p1, double[] p2)1967     private static int euclideanDistance(double[] p1, double[] p2) {
1968         if (p1.length != p2.length) {
1969             return Integer.MAX_VALUE;
1970         }
1971         double result = 0;
1972         for (int i = 0; i < p1.length; i++) {
1973             result += Math.pow(p1[i] - p2[i], 2);
1974         }
1975         return (int) Math.round(Math.sqrt(result));
1976     }
1977 
1978     /**
1979      * Crops the border of the array representing an image by hBorderSize
1980      * pixels on the left and right borders, and by vBorderSize pixels on the
1981      * top and bottom borders (so the width is 2 * hBorderSize smaller and
1982      * the height is 2 * vBorderSize smaller), then scales the image up to
1983      * match the original size using bilinear interpolation.
1984      */
shrinkAndScaleBilinear( Bitmap input, double hBorderSize, double vBorderSize)1985     private static Bitmap shrinkAndScaleBilinear(
1986             Bitmap input, double hBorderSize, double vBorderSize) {
1987 
1988         int width = input.getWidth();
1989         int height = input.getHeight();
1990 
1991         // Compute the proper step sizes
1992         double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
1993         double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);
1994 
1995         // Read the input bitmap into RGB arrays.
1996         int[] inputPixels = new int[width * height];
1997         input.getPixels(inputPixels, 0, width, 0, 0, width, height);
1998         int[][] inputRgb = new int[width * height][3];
1999         for (int i = 0; i < width * height; ++i) {
2000             inputRgb[i][0] = Color.red(inputPixels[i]);
2001             inputRgb[i][1] = Color.green(inputPixels[i]);
2002             inputRgb[i][2] = Color.blue(inputPixels[i]);
2003         }
2004         inputPixels = null;
2005 
2006         // Prepare the output buffer.
2007         int[] outputPixels = new int[width * height];
2008 
2009         // Start the iteration. The first y coordinate is vBorderSize.
2010         double y = vBorderSize;
2011         for (int yIndex = 0; yIndex < height; ++yIndex) {
2012             // The first x coordinate is hBorderSize.
2013             double x = hBorderSize;
2014             for (int xIndex = 0; xIndex < width; ++xIndex) {
2015                 // Determine the square of interest.
2016                 int left = (int)x;    // This is floor(x).
2017                 int top = (int)y;     // This is floor(y).
2018                 int right = left + 1;
2019                 int bottom = top + 1;
2020 
2021                 // (u, v) is the fractional part of (x, y).
2022                 double u = x - (double)left;
2023                 double v = y - (double)top;
2024 
2025                 // Precompute necessary products to save time.
2026                 double p00 = (1.0 - u) * (1.0 - v);
2027                 double p01 = (1.0 - u) * v;
2028                 double p10 = u * (1.0 - v);
2029                 double p11 = u * v;
2030 
2031                 // Clamp the indices to prevent out-of-bound that may be caused
2032                 // by round-off error.
2033                 if (left >= width) left = width - 1;
2034                 if (top >= height) top = height - 1;
2035                 if (right >= width) right = width - 1;
2036                 if (bottom >= height) bottom = height - 1;
2037 
2038                 // Sample RGB values from the four corners.
2039                 int[] rgb00 = inputRgb[top * width + left];
2040                 int[] rgb01 = inputRgb[bottom * width + left];
2041                 int[] rgb10 = inputRgb[top * width + right];
2042                 int[] rgb11 = inputRgb[bottom * width + right];
2043 
2044                 // Interpolate each component of RGB separately.
2045                 int[] mixedColor = new int[3];
2046                 for (int k = 0; k < 3; ++k) {
2047                     mixedColor[k] = (int)Math.round(
2048                             p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
2049                             + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
2050                 }
2051                 // Convert RGB to bitmap Color format and store.
2052                 outputPixels[yIndex * width + xIndex] = Color.rgb(
2053                         mixedColor[0], mixedColor[1], mixedColor[2]);
2054                 x += xInc;
2055             }
2056             y += yInc;
2057         }
2058         // Assemble the output buffer into a Bitmap object.
2059         return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
2060     }
2061 
2062     /**
2063      * Calls computeDifference on multiple cropped-and-scaled versions of
2064      * bitmap2.
2065      */
2066     @TargetApi(12)
computeMinimumDifference( Bitmap bitmap1, Bitmap bitmap2, int ignorePixels, Pair<Double, Double>[] borderCrops)2067     public static Difference computeMinimumDifference(
2068             Bitmap bitmap1, Bitmap bitmap2, int ignorePixels, Pair<Double, Double>[] borderCrops) {
2069 
2070         // Compute the difference with the original image (bitmap2) first.
2071         Difference minDiff = computeDifference(bitmap1, bitmap2, ignorePixels);
2072         // Then go through the list of borderCrops.
2073         for (Pair<Double, Double> borderCrop : borderCrops) {
2074             // Compute the difference between bitmap1 and a transformed
2075             // version of bitmap2.
2076             Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
2077             Difference d = computeDifference(bitmap1, bitmap2s, ignorePixels);
2078             // Keep the minimum difference.
2079             if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
2080                 minDiff = d;
2081                 minDiff.bestMatchBorderCrop = borderCrop;
2082             }
2083         }
2084         return minDiff;
2085     }
2086 
2087     /**
2088      * Calls computeMinimumDifference on a default list of borderCrop.
2089      */
2090     @TargetApi(12)
computeMinimumDifference( Bitmap bitmap1, Bitmap bitmap2, int ignorePixels, int trueWidth, int trueHeight)2091     public static Difference computeMinimumDifference(
2092             Bitmap bitmap1, Bitmap bitmap2, int ignorePixels, int trueWidth, int trueHeight) {
2093 
2094         double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
2095         double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
2096         double hBorderH = 0.5 * hBorder; // Half-texel horizontal border
2097         double vBorderH = 0.5 * vBorder; // Half-texel vertical border
2098         return computeMinimumDifference(
2099                 bitmap1,
2100                 bitmap2,
2101                 ignorePixels,
2102                 new Pair[] {
2103                     Pair.create(hBorderH, 0.0),
2104                     Pair.create(hBorderH, vBorderH),
2105                     Pair.create(0.0, vBorderH),
2106                     Pair.create(hBorder, 0.0),
2107                     Pair.create(hBorder, vBorder),
2108                     Pair.create(0.0, vBorder)
2109                 });
2110         // This default list of borderCrop comes from the behavior of
2111         // GLConsumer.computeTransformMatrix().
2112     }
2113 
2114     /* Describes the difference between two {@link Bitmap} instances. */
2115     public static final class Difference {
2116 
2117         public final int greatestPixelDifference;
2118         public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
2119         public Pair<Double, Double> bestMatchBorderCrop;
2120 
Difference(int greatestPixelDifference)2121         private Difference(int greatestPixelDifference) {
2122             this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
2123         }
2124 
Difference( int greatestPixelDifference, Pair<Integer, Integer> greatestPixelDifferenceCoordinates)2125         private Difference(
2126                 int greatestPixelDifference,
2127                 Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
2128             this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
2129                     Pair.create(0.0, 0.0));
2130         }
2131 
Difference( int greatestPixelDifference, Pair<Integer, Integer> greatestPixelDifferenceCoordinates, Pair<Double, Double> bestMatchBorderCrop)2132         private Difference(
2133                 int greatestPixelDifference,
2134                 Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
2135                 Pair<Double, Double> bestMatchBorderCrop) {
2136             this.greatestPixelDifference = greatestPixelDifference;
2137             this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
2138             this.bestMatchBorderCrop = bestMatchBorderCrop;
2139         }
2140     }
2141 
2142 }
2143 
2144 /* Wrapper for MIME types. */
/* Wrapper for MIME types. */
final class MimeTypes {

    // Non-instantiable utility class.
    private MimeTypes() {}

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    /** Returns whether the given MIME type belongs to the "video" top-level type. */
    public static boolean isVideo(String mimeType) {
        // Equivalent to startsWith("video"): compare the first five characters.
        return mimeType.regionMatches(0, "video", 0, 5);
    }

}
2157