/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.platform.test.annotations.Presubmit;
import android.platform.test.annotations.RequiresDevice;
import android.test.AndroidTestCase;
import android.util.Log;

import androidx.test.filters.FlakyTest;
import androidx.test.filters.SmallTest;

import com.android.compatibility.common.util.MediaUtils;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import javax.microedition.khronos.opengles.GL10;

/**
 * Generates a series of video frames, encodes them, decodes them, and tests for significant
 * divergence from the original.
 * <p>
 * We copy the data from the encoder's output buffers to the decoder's input buffers, running
 * them in parallel.  The first buffer output for video/avc contains codec configuration data,
 * which we must carefully forward to the decoder.
 * <p>
 * An alternative approach would be to save the output of the encoder as an mpeg4 video
 * file, and read it back in from disk.  The data we're generating is just an elementary
 * stream, so we'd need to perform additional steps to make that happen.
 */
@Presubmit
@SmallTest
@RequiresDevice
// TODO: b/186001256
@FlakyTest
public class EncodeDecodeTest extends AndroidTestCase {
    private static final String TAG = "EncodeDecodeTest";
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test.";

    // parameters for the encoder
    // H.264 Advanced Video Coding
    private static final String MIME_TYPE_AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final String MIME_TYPE_VP8 = MediaFormat.MIMETYPE_VIDEO_VP8;
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames

    // movie length, in frames
    private static final int NUM_FRAMES = 30;               // two seconds of video

    private static final int TEST_Y = 120;                  // YUV values for colored rect
    private static final int TEST_U = 160;
    private static final int TEST_V = 200;
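    // The RGB constants below follow from the limited-range conversion matrices; for BT.601,
    // R = 1.164*(Y-16) + 1.596*(V-128), G = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128),
    // B = 1.164*(Y-16) + 2.018*(U-128), with each result clamped to [0,255].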
    private static final int TEST_R0 = 0;                   // RGB equivalent of {0,0,0} (BT.601)
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // RGB equivalent of {120,160,200} (BT.601)
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;
    private static final int TEST_R0_BT709 = 0;             // RGB equivalent of {0,0,0} (BT.709)
    private static final int TEST_G0_BT709 = 77;
    private static final int TEST_B0_BT709 = 0;
    private static final int TEST_R1_BT709 = 250;           // RGB equivalent of {120,160,200} (BT.709)
    private static final int TEST_G1_BT709 = 76;
    private static final int TEST_B1_BT709 = 189;
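    // When true, the *Ndk test variants drive the encoder through the NDK MediaCodec
    // wrapper (NdkMediaCodec) rather than the Java SDK MediaCodec.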
    private static final boolean USE_NDK = true;

    // size of a frame, in pixels
    private int mWidth = -1;
    private int mHeight = -1;
    // bit rate, in bits per second
    private int mBitRate = -1;
    private String mMimeType = MIME_TYPE_AVC;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;

    // validate YUV->RGB decoded frames against BT.601 and/or BT.709
    private boolean mAllowBT601 = true;
    private boolean mAllowBT709 = false;

    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into ByteBuffers.  The output is checked for
     * validity.
     */
    public void testEncodeDecodeVideoFromBufferToBufferQCIF() throws Exception {
        setParameters(176, 144, 1000000, MIME_TYPE_AVC, true, false);
        encodeDecodeVideoFromBuffer(false);
    }
    public void testEncodeDecodeVideoFromBufferToBufferQVGA() throws Exception {
        setParameters(320, 240, 2000000, MIME_TYPE_AVC, true, false);
        encodeDecodeVideoFromBuffer(false);
    }
    public void testEncodeDecodeVideoFromBufferToBuffer720p() throws Exception {
        setParameters(1280, 720, 6000000, MIME_TYPE_AVC, true, false);
        encodeDecodeVideoFromBuffer(false);
    }

    /**
     * Tests streaming of VP8 video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into ByteBuffers.  The output is checked for
     * validity.
     */
    public void testVP8EncodeDecodeVideoFromBufferToBufferQCIF() throws Exception {
        setParameters(176, 144, 1000000, MIME_TYPE_VP8, true, false);
        encodeDecodeVideoFromBuffer(false);
    }
    public void testVP8EncodeDecodeVideoFromBufferToBufferQVGA() throws Exception {
        setParameters(320, 240, 2000000, MIME_TYPE_VP8, true, false);
        encodeDecodeVideoFromBuffer(false);
    }
    public void testVP8EncodeDecodeVideoFromBufferToBuffer720p() throws Exception {
        setParameters(1280, 720, 6000000, MIME_TYPE_VP8, true, false);
        encodeDecodeVideoFromBuffer(false);
    }

    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into Surfaces.  The output is checked for
     * validity.
     * <p>
     * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this
     * test on a thread that doesn't have a Looper configured.  If we don't, the test will
     * pass, but we won't actually test the output because we'll never receive the "frame
     * available" notifications.  The CTS test framework seems to be configuring a Looper on
     * the test thread, so we have to hand control off to a new thread for the duration of
     * the test.
     */
    public void testEncodeDecodeVideoFromBufferToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000, MIME_TYPE_AVC, true, false);
        BufferToSurfaceWrapper.runTest(this);
    }
    public void testEncodeDecodeVideoFromBufferToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000, MIME_TYPE_AVC, true, false);
        BufferToSurfaceWrapper.runTest(this);
    }
    public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_AVC, true, true);
        BufferToSurfaceWrapper.runTest(this);
    }

    /**
     * Tests streaming of VP8 video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into Surfaces.  The output is checked for
     * validity.
     */
    public void testVP8EncodeDecodeVideoFromBufferToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000, MIME_TYPE_VP8, true, false);
        BufferToSurfaceWrapper.runTest(this);
    }
    public void testVP8EncodeDecodeVideoFromBufferToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000, MIME_TYPE_VP8, true, false);
        BufferToSurfaceWrapper.runTest(this);
    }
    public void testVP8EncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_VP8, true, true);
        BufferToSurfaceWrapper.runTest(this);
    }

    /** Wraps encodeDecodeVideoFromBuffer(true) */
    private static class BufferToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;

        private BufferToSurfaceWrapper(EncodeDecodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeDecodeVideoFromBuffer(true);
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /**
         * Entry point.
         */
        public static void runTest(EncodeDecodeTest obj) throws Throwable {
            BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is provided through
     * a Surface and decoded onto a Surface.  The output is checked for validity.
     */
    public void testEncodeDecodeVideoFromSurfaceToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }
    public void testEncodeDecodeVideoFromSurfaceToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }
    public void testEncodeDecodeVideoFromSurfaceToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }
    public void testEncodeDecodeVideoFromSurfaceToSurface720pNdk() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, USE_NDK);
    }

    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is provided through
     * a PersistentSurface and decoded onto a Surface.  The output is checked for validity.
     */
    public void testEncodeDecodeVideoFromPersistentSurfaceToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }
    public void testEncodeDecodeVideoFromPersistentSurfaceToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }
    public void testEncodeDecodeVideoFromPersistentSurfaceToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }
    public void testEncodeDecodeVideoFromPersistentSurfaceToSurface720pNdk() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_AVC, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, USE_NDK);
    }

    /**
     * Tests streaming of VP8 video through the encoder and decoder.  Data is provided through
     * a Surface and decoded onto a Surface.  The output is checked for validity.
     */
    public void testVP8EncodeDecodeVideoFromSurfaceToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }
    public void testVP8EncodeDecodeVideoFromSurfaceToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }
    public void testVP8EncodeDecodeVideoFromSurfaceToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }
    public void testVP8EncodeDecodeVideoFromSurfaceToSurface720pNdk() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, false, USE_NDK);
    }

    /**
     * Tests streaming of VP8 video through the encoder and decoder.  Data is provided through
     * a PersistentSurface and decoded onto a Surface.  The output is checked for validity.
     */
    public void testVP8EncodeDecodeVideoFromPersistentSurfaceToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }
    public void testVP8EncodeDecodeVideoFromPersistentSurfaceToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }
    public void testVP8EncodeDecodeVideoFromPersistentSurfaceToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }
    public void testVP8EncodeDecodeVideoFromPersistentSurfaceToSurface720pNdk() throws Throwable {
        setParameters(1280, 720, 6000000, MIME_TYPE_VP8, true, false);
        SurfaceToSurfaceWrapper.runTest(this, true, USE_NDK);
    }

    /** Wraps encodeDecodeVideoFromSurfaceToSurface() */
    private static class SurfaceToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;
        private boolean mUsePersistentInput;
        private boolean mUseNdk;

        private SurfaceToSurfaceWrapper(EncodeDecodeTest test, boolean persistent, boolean useNdk) {
            mTest = test;
            mUsePersistentInput = persistent;
            mUseNdk = useNdk;
        }

        @Override
        public void run() {
            if (mTest.shouldSkip()) {
                return;
            }

            InputSurfaceInterface inputSurface = null;
            try {
                if (!mUsePersistentInput) {
                    mTest.encodeDecodeVideoFromSurfaceToSurface(null, mUseNdk);
                } else {
                    Log.d(TAG, "creating persistent surface");
                    if (mUseNdk) {
                        inputSurface = NdkMediaCodec.createPersistentInputSurface();
                    } else {
                        inputSurface = new InputSurface(MediaCodec.createPersistentInputSurface());
                    }

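                    // Run several encode/decode sessions over the same persistent input
                    // surface to exercise its reuse across codec sessions.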
                    for (int i = 0; i < 3; i++) {
                        Log.d(TAG, "test persistent surface - round " + i);
                        mTest.encodeDecodeVideoFromSurfaceToSurface(inputSurface, mUseNdk);
                    }
                }
            } catch (Throwable th) {
                mThrowable = th;
            } finally {
                if (inputSurface != null) {
                    inputSurface.release();
                }
            }
        }

        /**
         * Entry point.
         */
        public static void runTest(EncodeDecodeTest obj, boolean persistent, boolean useNdk)
                throws Throwable {
            SurfaceToSurfaceWrapper wrapper =
                    new SurfaceToSurfaceWrapper(obj, persistent, useNdk);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Sets the desired frame size and bit rate.
     */
    protected void setParameters(int width, int height, int bitRate, String mimeType,
            boolean allowBT601, boolean allowBT709) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
        mMimeType = mimeType;
        mAllowBT601 = allowBT601;
        mAllowBT709 = allowBT709;
    }

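    /**
     * Returns true if this device has no encoder capable of the current MIME type and
     * format, in which case the test is skipped rather than failed.
     */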
    private boolean shouldSkip() {
        if (!MediaUtils.hasEncoder(mMimeType)) {
            return true;
        }

        MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (!MediaUtils.checkEncoderForFormat(format)) {
            return true;
        }

        return false;
    }

    /**
     * Tests encoding and subsequently decoding video from frames generated into a buffer.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     * <p>
     * See http://b.android.com/37769 for a discussion of input format pitfalls.
     */
    private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
        if (shouldSkip()) {
            return;
        }

        MediaCodec encoder = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codec = mcl.findEncoderForFormat(format);
            if (codec == null) {
                // Don't fail CTS if the device doesn't have a suitable encoder (not here, anyway).
                Log.e(TAG, "Unable to find an appropriate encoder for " + format);
                return;
            }
            if (VERBOSE) Log.d(TAG, "found codec: " + codec);

            String codec_decoder = mcl.findDecoderForFormat(format);
            if (codec_decoder == null) {
                Log.e(TAG, "Unable to find an appropriate decoder for " + format);
                return;
            }

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(codec);

            int colorFormat = selectColorFormat(encoder.getCodecInfo(), mMimeType);
            if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "format: " + format);

            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encoder.start();

            // Create a MediaCodec for the decoder, just based on the MIME type.  The various
            // format details will be passed through the csd-0 meta-data later on.
            decoder = MediaCodec.createByCodecName(codec_decoder);
            if (VERBOSE) Log.d(TAG, "got decoder: " + decoder.getName());

            doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Tests encoding and subsequently decoding video from frames generated onto the encoder's
     * input Surface and rendered to an output Surface.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     */
    private void encodeDecodeVideoFromSurfaceToSurface(InputSurfaceInterface inSurf,
            boolean useNdk) throws Exception {
        MediaCodecWrapper encoder = null;
        MediaCodec decoder = null;
        InputSurfaceInterface inputSurface = inSurf;
        OutputSurface outputSurface = null;

        mLargestColorDelta = -1;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codec = mcl.findEncoderForFormat(format);
            if (codec == null) {
                // Don't fail CTS if the device doesn't have a suitable encoder (not here, anyway).
                Log.e(TAG, "Unable to find an appropriate encoder for " + format);
                return;
            }
            if (VERBOSE) Log.d(TAG, "found codec: " + codec);

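            // The encoder is fed from a Surface, so request COLOR_FormatSurface instead of
            // a byte-buffer YUV color format.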
            int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

            // Set color parameters
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);

            if (VERBOSE) Log.d(TAG, "format: " + format);

            // Create the output surface.
            outputSurface = new OutputSurface(mWidth, mHeight);

            // Create a MediaCodec for the decoder, just based on the MIME type.  The various
            // format details will be passed through the csd-0 meta-data later on.
            String codec_decoder = mcl.findDecoderForFormat(format);
            if (codec_decoder == null) {
                Log.e(TAG, "Unable to find an appropriate decoder for " + format);
                return;
            }
            decoder = MediaCodec.createByCodecName(codec_decoder);
            if (VERBOSE) Log.d(TAG, "got decoder: " + decoder.getName());
            decoder.configure(format, outputSurface.getSurface(), null, 0);
            decoder.start();

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.  Request a Surface to use for input.
            if (useNdk) {
                encoder = new NdkMediaCodec(codec);
            } else {
                encoder = new SdkMediaCodec(MediaCodec.createByCodecName(codec));
            }
            encoder.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
            if (inSurf != null) {
                Log.d(TAG, "using persistent surface");
                encoder.setInputSurface(inputSurface);
                inputSurface.updateSize(mWidth, mHeight);
            } else {
                inputSurface = encoder.createInputSurface();
            }
            encoder.start();

            doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, decoder, outputSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (inSurf == null && inputSurface != null) {
                inputSurface.release();
            }
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Returns a color format that is supported by the codec and by this test code.  If no
     * match is found, this throws a test failure -- the set of formats known to the test
     * should be expanded for new platforms.
     */
    private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return 0;   // not reached
    }

    /**
     * Returns true if this is a color format that this test code understands (i.e. we know how
     * to read and generate frames in this format).
     */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this test
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    /**
     * Returns true if the specified color format is semi-planar YUV.  Throws an exception
     * if the color format is not recognized (e.g. not YUV).
     */
    private static boolean isSemiPlanarYUV(int colorFormat) {
        switch (colorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                return false;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                throw new RuntimeException("unknown format " + colorFormat);
        }
    }

    /**
     * Does the actual work for encoding and decoding frames from byte[] buffers.
     */
    private void doEncodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat,
            MediaCodec decoder, boolean toSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = null;
        ByteBuffer[] decoderOutputBuffers = null;
        MediaCodec.BufferInfo decoderInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo encoderInfo = new MediaCodec.BufferInfo();
        MediaFormat decoderOutputFormat = null;
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;
        boolean decoderConfigured = false;
        OutputSurface outputSurface = null;

        // The size of a frame of video data, in the formats we handle, is stride*sliceHeight
        // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels.  Application
        // of algebra and assuming that stride==width and sliceHeight==height yields:
        byte[] frameData = new byte[mWidth * mHeight * 3 / 2];

        // Just out of curiosity.
        long rawSize = 0;
        long encodedSize = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        if (toSurface) {
            outputSurface = new OutputSurface(mWidth, mHeight);
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
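        // Index of an encoder output buffer that was dequeued but not yet handed to the
        // decoder (e.g. because no decoder input buffer was free); -1 means none pending.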
        int encoderStatus = -1;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  By
            // doing this on every loop we're working to ensure that the encoder always has
            // work to do.
            //
            // We don't really want a timeout here, but sometimes there's a delay opening
            // the encoder device, so a short timeout can keep us from spinning hard.
            if (!inputDone) {
                int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex);
                if (inputBufIndex >= 0) {
                    long ptsUsec = computePresentationTime(generateIndex);
                    if (generateIndex == NUM_FRAMES) {
                        // Send an empty frame with the end-of-stream flag set.  If we set EOS
                        // on a frame with data, that frame data will be ignored, and the
                        // output will be short one frame.
                        encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        generateFrame(generateIndex, encoderColorFormat, frameData);

                        ByteBuffer inputBuf = encoder.getInputBuffer(inputBufIndex);
                        // the buffer should be sized to hold one full frame
                        assertTrue(inputBuf.capacity() >= frameData.length);
                        inputBuf.clear();
                        inputBuf.put(frameData);

                        encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0);
                        if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc");
                    }
                    generateIndex++;
                } else {
                    // either all in use, or we timed out during initial setup
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // Once we get EOS from the encoder, we don't need to do this anymore.
            if (!encoderDone) {
                MediaCodec.BufferInfo info = encoderInfo;
                if (encoderStatus < 0) {
                    encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                }
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected on API 18+
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoder.getOutputBuffer(encoderStatus);
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);

                    boolean releaseBuffer = false;
                    if (!decoderConfigured) {
                        // Codec config info.  Only expected on first packet.  One way to
                        // handle this is to manually stuff the data into the MediaFormat
                        // and pass that to configure().  We do that here to exercise the API.
                        // For codecs that don't have codec config data (such as VP8),
                        // initialize the decoder before trying to decode the first packet.
                        assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ||
                                   mMimeType.equals(MIME_TYPE_VP8));
                        MediaFormat format =
                                MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)
                            format.setByteBuffer("csd-0", encodedData);
                        decoder.configure(format, toSurface ? outputSurface.getSurface() : null,
                                null, 0);
                        decoder.start();
                        decoderInputBuffers = decoder.getInputBuffers();
                        decoderOutputBuffers = decoder.getOutputBuffers();
                        decoderConfigured = true;
                        if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)");
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                        // Get a decoder input buffer
                        assertTrue(decoderConfigured);
                        int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                        if (inputBufIndex >= 0) {
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.clear();
                            inputBuf.put(encodedData);
                            decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);

                            encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                            if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                    + (encoderDone ? " (EOS)" : ""));
                            releaseBuffer = true;
                        }
                    } else {
                        releaseBuffer = true;
                    }
                    if (releaseBuffer) {
                        encodedSize += info.size;
                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.position(info.offset);
                            encodedData.get(data);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }
                        encoder.releaseOutputBuffer(encoderStatus, false);
                        encoderStatus = -1;
                    }
                }
            }

            // Check for output from the decoder.  We want to do this on every loop to avoid
            // the possibility of stalling the pipeline.  We use a short timeout to avoid
            // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
            //
            // If we're decoding to a Surface, we'll get notified here as usual but the
            // ByteBuffer references will be null.  The data is sent to Surface instead.
            if (decoderConfigured) {
                MediaCodec.BufferInfo info = decoderInfo;
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The storage associated with the direct ByteBuffer may already be unmapped,
                    // so attempting to access data through the old output buffer array could
                    // lead to a native crash.
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                    decoderOutputBuffers = decoder.getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else {  // decoderStatus >= 0
                    if (!toSurface) {
                        ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
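                        // Alternate between the ByteBuffer and Image views of the decoded
                        // output so both access paths get exercised.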
                        Image outputImage = (checkIndex % 2 == 0) ? null
                                : decoder.getOutputImage(decoderStatus);

                        outputFrame.position(info.offset);
                        outputFrame.limit(info.offset + info.size);

                        rawSize += info.size;
                        if (info.size == 0) {
                            if (VERBOSE) Log.d(TAG, "got empty frame");
                        } else {
                            if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            if (!checkFrame(checkIndex++, decoderOutputFormat, outputFrame,
                                    outputImage)) {
                                badFrames++;
                            }
                        }
                        if (outputImage != null) {
                            outputImage.close();
                        }

                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }
                        decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                    } else {
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                                " (size=" + info.size + ")");
                        rawSize += info.size;
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }

                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                        // that the texture will be available before the call returns, so we
                        // need to wait for the onFrameAvailable callback to fire.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();
                            if (!checkSurfaceFrame(checkIndex++)) {
                                badFrames++;
                            }
                        }
                    }
                }
            }
        }

        if (VERBOSE) Log.d(TAG, "decoded " + checkIndex + " frames at "
                + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize);
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (outputSurface != null) {
            outputSurface.release();
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }

    /**
     * Does the actual work for encoding and decoding from Surface to Surface.
     */
    private void doEncodeDecodeVideoFromSurfaceToSurface(MediaCodecWrapper encoder,
            InputSurfaceInterface inputSurface, MediaCodec decoder,
            OutputSurface outputSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    encoder.signalEndOfInputStream();
                    inputDone = true;
                } else {
                    inputSurface.makeCurrent();
                    generateSurfaceFrame(generateIndex);
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Assume both the decoder and the encoder have output available, and loop until
            // both assumptions have been proven false.
            boolean decoderOutputAvailable = true;
            boolean encoderOutputAvailable = !encoderDone;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the decoder.  It's important to
                // do this before we try to stuff any more data in.
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (but we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    MediaFormat decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
987                     fail("unexpected result from deocder.dequeueOutputBuffer: " + decoderStatus);
                } else {  // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    // The ByteBuffers are null references, but we still get a nonzero size for
                    // the decoded data.
                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.  If we don't
                    // wait, we risk dropping frames.
                    outputSurface.makeCurrent();
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
                if (decoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Decoder is drained, check to see if we've got a new buffer of output from
                // the encoder.
                if (!encoderDone) {
                    int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from encoder available");
                        encoderOutputAvailable = false;
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        // not expected for an encoder
                        encoderOutputBuffers = encoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected on API 18+
                        String newFormat = encoder.getOutputFormatString();
                        if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                    } else if (encoderStatus < 0) {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    } else { // encoderStatus >= 0
                        ByteBuffer encodedData = encoder.getOutputBuffer(encoderStatus);
                        if (encodedData == null) {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }

                        // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.get(data);
                            encodedData.position(info.offset);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }

                        // Get a decoder input buffer, blocking until it's available.  We just
                        // drained the decoder output, so we expect there to be a free input
                        // buffer now or in the near future (i.e. this should never deadlock
                        // if the codec is meeting requirements).
                        //
                        // The first buffer of data we get will have the BUFFER_FLAG_CODEC_CONFIG
                        // flag set; the decoder will see this and finish configuring itself.
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        // If everything from the encoder has been passed to the decoder, we
                        // can stop polling the encoder output.  (This is just an optimization.)
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            encoderDone = true;
                            encoderOutputAvailable = false;
                        }
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));

                        encoder.releaseOutputBuffer(encoderStatus, false);
                    }
                }
            }
        }

        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }


    /**
     * Generates data for frame N into the supplied buffer.  We have an 8-frame animation
     * sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
     */
    private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) {
        final int HALF_WIDTH = mWidth / 2;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);

        // Set to zero.  In YUV this is a dull green.
        Arrays.fill(frameData, (byte) 0);

        int startX, startY;

        frameIndex %= 8;
        //frameIndex = (frameIndex / 8) % 8;    // use this instead for debug -- easier to see
        if (frameIndex < 4) {
            startX = frameIndex * (mWidth / 4);
            startY = 0;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = mHeight / 2;
        }

        for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
            for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
                if (semiPlanar) {
                    // full-size Y, followed by UV pairs at half resolution
                    // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
                    // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
                    //        OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
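                    // The UV indexing below relies on x and y being even: for even y,
                    // y * HALF_WIDTH == (y/2) * mWidth, the start of the interleaved UV
                    // row for luma row y, and even x lands on the U byte of a UV pair.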
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
                    }
                } else {
                    // full-size Y, followed by quarter-size U and quarter-size V
                    // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
                    // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
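                    // The U plane starts at mWidth*mHeight, the V plane at mWidth*mHeight
                    // + HALF_WIDTH * (mHeight/2); each chroma row holds HALF_WIDTH samples,
                    // so pixel (x,y) maps to chroma row y/2, column x/2.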
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
                        frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
                                  (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
                    }
                }
            }
        }
    }

    /**
     * Performs a simple check to see if the frame is more or less right.
     * <p>
     * See {@link #generateFrame} for a description of the layout.  The idea is to sample
     * one pixel from the middle of each of the 8 regions, and verify that the correct one
     * has the non-background color.  We can't know exactly what the video encoder has done
     * with our frames, so we just check that the result looks more or less like the right
     * thing.
     *
     * @return true if the frame looks good
     */
    private boolean checkFrame(int frameIndex, MediaFormat format, ByteBuffer frameData, Image image) {
        // Check for color formats we don't understand.  There is no requirement for video
        // decoders to use a "mundane" format, so we just give a pass on proprietary formats.
        // e.g. Nexus 4 0x7FA30C03 OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
        int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
        if (!isRecognizedFormat(colorFormat)) {
            Log.d(TAG, "unable to check frame contents for colorFormat=" +
                    Integer.toHexString(colorFormat));
            return true;
        }

        boolean frameFailed = false;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);
        int width = format.getInteger(MediaFormat.KEY_STRIDE,
                format.getInteger(MediaFormat.KEY_WIDTH));
        int height = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT,
                format.getInteger(MediaFormat.KEY_HEIGHT));
        int halfWidth = width / 2;
        int cropLeft = format.getInteger("crop-left");
        int cropRight = format.getInteger("crop-right");
        int cropTop = format.getInteger("crop-top");
        int cropBottom = format.getInteger("crop-bottom");
        if (image != null) {
            cropLeft = image.getCropRect().left;
            cropRight = image.getCropRect().right - 1;
            cropTop = image.getCropRect().top;
            cropBottom = image.getCropRect().bottom - 1;
        }
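        // Both crop bounds are inclusive here (Image.getCropRect() is exclusive on the
        // right/bottom edges, hence the -1 adjustments above), so size = max - min + 1.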
        int cropWidth = cropRight - cropLeft + 1;
        int cropHeight = cropBottom - cropTop + 1;

        assertEquals(mWidth, cropWidth);
        assertEquals(mHeight, cropHeight);

        for (int i = 0; i < 8; i++) {
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            }

            y += cropTop;
            x += cropLeft;

            int testY, testU, testV;
            if (image != null) {
                Image.Plane[] planes = image.getPlanes();
                if (planes.length == 3 && image.getFormat() == ImageFormat.YUV_420_888) {
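                    // YUV_420_888 guarantees three planes with 2x2-subsampled chroma; the
                    // per-plane row and pixel strides abstract over planar vs. semi-planar
                    // storage (pixelStride is 1 for packed planes, 2 for interleaved UV).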
                    testY = planes[0].getBuffer().get(y * planes[0].getRowStride() + x * planes[0].getPixelStride()) & 0xff;
                    testU = planes[1].getBuffer().get((y/2) * planes[1].getRowStride() + (x/2) * planes[1].getPixelStride()) & 0xff;
                    testV = planes[2].getBuffer().get((y/2) * planes[2].getRowStride() + (x/2) * planes[2].getPixelStride()) & 0xff;
                } else {
                    testY = testU = testV = 0;
                }
            } else {
                int off = frameData.position();
                if (semiPlanar) {
                    // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                    testY = frameData.get(off + y * width + x) & 0xff;
                    testU = frameData.get(off + width*height + 2*(y/2) * halfWidth + 2*(x/2)) & 0xff;
                    testV = frameData.get(off + width*height + 2*(y/2) * halfWidth + 2*(x/2) + 1) & 0xff;
                } else {
                    // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar
                    testY = frameData.get(off + y * width + x) & 0xff;
                    testU = frameData.get(off + width*height + (y/2) * halfWidth + (x/2)) & 0xff;
                    testV = frameData.get(off + width*height + halfWidth * (height / 2) +
                            (y/2) * halfWidth + (x/2)) & 0xff;
                }
            }

            int expY, expU, expV;
            if (i == frameIndex % 8) {
                // colored rect
                expY = TEST_Y;
                expU = TEST_U;
                expV = TEST_V;
            } else {
                // should be our zeroed-out buffer
                expY = expU = expV = 0;
            }
            if (!isColorClose(testY, expY) ||
                    !isColorClose(testU, expU) ||
                    !isColorClose(testV, expV)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": yuv=" + testY +
                        "," + testU + "," + testV + " vs. expected " + expY + "," + expU +
                        "," + expV + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Generates a frame of data using GL commands.
     */
    private void generateSurfaceFrame(int frameIndex) {
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (mWidth / 4);
            startY = mHeight / 2;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = 0;
        }

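        // Clear the whole buffer to the background color, then enable a scissor box over
        // the target rect and clear again with the foreground color; the second clear
        // only touches pixels inside the scissor box.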
        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }

    /**
     * Checks the frame for correctness.  Similar to {@link #checkFrame}, but uses GL to
     * read pixels from the current surface.
     *
     * @return true if the frame looks good
     */
    private boolean checkSurfaceFrame(int frameIndex) {
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        boolean frameFailed = false;

        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }

            GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
            int r = pixelBuf.get(0) & 0xff;
            int g = pixelBuf.get(1) & 0xff;
            int b = pixelBuf.get(2) & 0xff;
            //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

            int expR, expG, expB, expR_bt709, expG_bt709, expB_bt709;
            if (i == frameIndex % 8) {
                // colored rect
                expR = TEST_R1;
                expG = TEST_G1;
                expB = TEST_B1;
                expR_bt709 = TEST_R1_BT709;
                expG_bt709 = TEST_G1_BT709;
                expB_bt709 = TEST_B1_BT709;
            } else {
                // zero background color
                expR = TEST_R0;
                expG = TEST_G0;
                expB = TEST_B0;
                expR_bt709 = TEST_R0_BT709;
                expG_bt709 = TEST_G0_BT709;
                expB_bt709 = TEST_B0_BT709;
            }

            // Some decoders use BT.709 when converting HD (i.e. >= 720p)
            // frames from YUV to RGB, so check against both BT.601 and BT.709
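            // Once a frame matches only one of the two standards, rule the other out
            // for the rest of the sequence; a decoder should pick one color standard
            // and apply it consistently to every frame.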
            if (mAllowBT601 &&
                    isColorClose(r, expR) &&
                    isColorClose(g, expG) &&
                    isColorClose(b, expB)) {
                // frame OK on BT.601
                mAllowBT709 = false;
            } else if (mAllowBT709 &&
                           isColorClose(r, expR_bt709) &&
                           isColorClose(g, expG_bt709) &&
                           isColorClose(b, expB_bt709)) {
                // frame OK on BT.709
                mAllowBT601 = false;
            } else {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + " @ " + x + " " + y + ": rgb=" + r +
                        "," + g + "," + b + " vs. expected " + expR + "," + expG +
                        "," + expB + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Returns true if the actual color value is close to the expected color value.  Updates
     * mLargestColorDelta.
     */
    boolean isColorClose(int actual, int expected) {
        final int MAX_DELTA = 8;
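        // Allow a small per-channel tolerance; lossy encoding plus YUV<->RGB conversion
        // rounding can shift values by a few steps even when the frame is correct.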
        int delta = Math.abs(actual - expected);
        if (delta > mLargestColorDelta) {
            mLargestColorDelta = delta;
        }
        return (delta <= MAX_DELTA);
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private static long computePresentationTime(int frameIndex) {
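        // Frames are 1000000/FRAME_RATE microseconds apart (66666us at 15fps).  The
        // extra 132us offset appears arbitrary; it keeps frame 0 from having a
        // timestamp of exactly zero.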
        return 132 + frameIndex * 1000000 / FRAME_RATE;
    }
}