/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.media.cts.R;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.content.res.Resources.NotFoundException;
import android.graphics.Rect;
import android.graphics.ImageFormat;
import android.media.cts.CodecUtils;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;

import com.android.compatibility.common.util.MediaUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Basic test for ImageReader APIs.
 * <p>
 * It uses MediaCodec to decode short video streams and sends the video frames to
 * the surface provided by an ImageReader, then verifies that the output buffers
 * obtained from the ImageReader match the output buffers of the MediaCodec. Several
 * compression formats (H.263, MPEG-4, AVC, HEVC, VP8, VP9) are exercised, and both
 * hardware and software decoders are tested.
 * </p>
 */
public class ImageReaderDecoderTest extends AndroidTestCase {
    private static final String TAG = "ImageReaderDecoderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    private static final long DEFAULT_TIMEOUT_US = 10000;
    private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 1000;
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/";
    private static final int NUM_FRAME_DECODED = 100;
    // video decoders only support a single outstanding image with the consumer
    private static final int MAX_NUM_IMAGES = 1;
    private static final float COLOR_STDEV_ALLOWANCE = 5f;
    private static final float COLOR_DELTA_ALLOWANCE = 5f;

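    // Decoding modes: MODE_IMAGEREADER renders decoder output to an ImageReader's Surface
    // and reads Images from the reader; MODE_IMAGE reads Images directly from the decoder's
    // output buffers via MediaCodec.getOutputImage().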
    private final static int MODE_IMAGEREADER = 0;
    private final static int MODE_IMAGE       = 1;

    private Resources mResources;
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private ImageReader mReader;
    private Surface mReaderSurface;
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private ImageListener mImageListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mHandlerThread = new HandlerThread(TAG);
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mImageListener = new ImageListener();
    }

    @Override
    protected void tearDown() throws Exception {
        mHandlerThread.quitSafely();
        mHandler = null;
    }

    static class MediaAsset {
        public MediaAsset(int resource, int width, int height) {
            mResource = resource;
            mWidth = width;
            mHeight = height;
        }

        public int getWidth() {
            return mWidth;
        }

        public int getHeight() {
            return mHeight;
        }

        public int getResource() {
            return mResource;
        }

        private final int mResource;
        private final int mWidth;
        private final int mHeight;
    }

    static class MediaAssets {
        public MediaAssets(String mime, MediaAsset... assets) {
            mMime = mime;
            mAssets = assets;
        }

        public String getMime() {
            return mMime;
        }

        public MediaAsset[] getAssets() {
            return mAssets;
        }

        private final String mMime;
        private final MediaAsset[] mAssets;
    }

    private static MediaAssets H263_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_H263,
            new MediaAsset(R.raw.swirl_176x144_h263, 176, 144),
            new MediaAsset(R.raw.swirl_352x288_h263, 352, 288),
            new MediaAsset(R.raw.swirl_128x96_h263, 128, 96));

    private static MediaAssets MPEG4_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_MPEG4,
            new MediaAsset(R.raw.swirl_128x128_mpeg4, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_mpeg4, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_mpeg4, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_mpeg4, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_mpeg4, 130, 132));

    private static MediaAssets H264_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AVC,
            new MediaAsset(R.raw.swirl_128x128_h264, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h264, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h264, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h264, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h264, 130, 132));

    private static MediaAssets H265_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_HEVC,
            new MediaAsset(R.raw.swirl_128x128_h265, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h265, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h265, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h265, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h265, 130, 132));

    private static MediaAssets VP8_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP8,
            new MediaAsset(R.raw.swirl_128x128_vp8, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp8, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp8, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp8, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp8, 130, 132));

    private static MediaAssets VP9_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP9,
            new MediaAsset(R.raw.swirl_128x128_vp9, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp9, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp9, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp9, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp9, 130, 132));

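    // Frame rate of the swirl test clips; used to check size/rate support of each decoder.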
    static final float SWIRL_FPS = 12.f;

    class Decoder {
        private final String mName;
        private final String mMime;
        private final VideoCapabilities mCaps;
        private final ArrayList<MediaAsset> mAssets;

        boolean isFlexibleFormatSupported(CodecCapabilities caps) {
            for (int c : caps.colorFormats) {
                if (c == COLOR_FormatYUV420Flexible) {
                    return true;
                }
            }
            return false;
        }

        Decoder(String name, MediaAssets assets, CodecCapabilities caps) {
            mName = name;
            mMime = assets.getMime();
            mCaps = caps.getVideoCapabilities();
            mAssets = new ArrayList<MediaAsset>();

            for (MediaAsset asset : assets.getAssets()) {
                if (mCaps.areSizeAndRateSupported(asset.getWidth(), asset.getHeight(), SWIRL_FPS)
                        && isFlexibleFormatSupported(caps)) {
                    mAssets.add(asset);
                }
            }
        }

        public boolean videoDecode(int mode, boolean checkSwirl) {
            boolean skipped = true;
            for (MediaAsset asset : mAssets) {
                // TODO: loop over all supported image formats
                int imageFormat = ImageFormat.YUV_420_888;
                int colorFormat = COLOR_FormatYUV420Flexible;
                videoDecode(asset, imageFormat, colorFormat, mode, checkSwirl);
                skipped = false;
            }
            return skipped;
        }

        private void videoDecode(
                MediaAsset asset, int imageFormat, int colorFormat, int mode, boolean checkSwirl) {
            int video = asset.getResource();
            int width = asset.getWidth();
            int height = asset.getHeight();

            if (DEBUG) Log.d(TAG, "videoDecode " + mName + " " + width + "x" + height);

            MediaCodec decoder = null;
            AssetFileDescriptor vidFD = null;

            MediaExtractor extractor = null;
            File tmpFile = null;
            InputStream is = null;
            FileOutputStream os = null;
            MediaFormat mediaFormat = null;
            try {
                extractor = new MediaExtractor();

                try {
                    vidFD = mResources.openRawResourceFd(video);
                    extractor.setDataSource(
                            vidFD.getFileDescriptor(), vidFD.getStartOffset(), vidFD.getLength());
                } catch (NotFoundException e) {
                    // resource is compressed, uncompress locally
                    String tmpName = "tempStream";
                    tmpFile = File.createTempFile(tmpName, null, mContext.getCacheDir());
                    is = mResources.openRawResource(video);
                    os = new FileOutputStream(tmpFile);
                    byte[] buf = new byte[1024];
                    int len;
                    while ((len = is.read(buf, 0, buf.length)) > 0) {
                        os.write(buf, 0, len);
                    }
                    os.close();
                    is.close();

                    extractor.setDataSource(tmpFile.getAbsolutePath());
                }

                mediaFormat = extractor.getTrackFormat(0);
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);

                // Create decoder
                decoder = MediaCodec.createByCodecName(mName);
                assertNotNull("couldn't create decoder " + mName, decoder);

                decodeFramesToImage(
                        decoder, extractor, mediaFormat,
                        width, height, imageFormat, mode, checkSwirl);

                decoder.stop();
                if (vidFD != null) {
                    vidFD.close();
                }
            } catch (Throwable e) {
                throw new RuntimeException("while " + mName + " decoding "
                        + mResources.getResourceEntryName(video) + ": " + mediaFormat, e);
            } finally {
                if (decoder != null) {
                    decoder.release();
                }
                if (extractor != null) {
                    extractor.release();
                }
                if (tmpFile != null) {
                    tmpFile.delete();
                }
            }
        }
    }

    private Decoder[] decoders(MediaAssets assets, boolean goog) {
        String mime = assets.getMime();
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        ArrayList<Decoder> result = new ArrayList<Decoder>();

        for (MediaCodecInfo info : mcl.getCodecInfos()) {
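            // "OMX.google." codecs are the platform's software decoders; keep them when goog
            // is true and keep all other (typically hardware) decoders when goog is false.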
            if (info.isEncoder()
                    || info.getName().toLowerCase().startsWith("omx.google.") != goog) {
                continue;
            }
            CodecCapabilities caps = null;
            try {
                caps = info.getCapabilitiesForType(mime);
            } catch (IllegalArgumentException e) { // mime is not supported
                continue;
            }
            assertNotNull(info.getName() + " capabilities for " + mime + " returned null", caps);
            result.add(new Decoder(info.getName(), assets, caps));
        }
        return result.toArray(new Decoder[result.size()]);
    }

    private Decoder[] goog(MediaAssets assets) {
        return decoders(assets, true /* goog */);
    }

    private Decoder[] other(MediaAssets assets) {
        return decoders(assets, false /* goog */);
    }

    private Decoder[] googH265()  { return goog(H265_ASSETS); }
    private Decoder[] googH264()  { return goog(H264_ASSETS); }
    private Decoder[] googH263()  { return goog(H263_ASSETS); }
    private Decoder[] googMpeg4() { return goog(MPEG4_ASSETS); }
    private Decoder[] googVP8()   { return goog(VP8_ASSETS); }
    private Decoder[] googVP9()   { return goog(VP9_ASSETS); }

    private Decoder[] otherH265()  { return other(H265_ASSETS); }
    private Decoder[] otherH264()  { return other(H264_ASSETS); }
    private Decoder[] otherH263()  { return other(H263_ASSETS); }
    private Decoder[] otherMpeg4() { return other(MPEG4_ASSETS); }
    private Decoder[] otherVP8()   { return other(VP8_ASSETS); }
    private Decoder[] otherVP9()   { return other(VP9_ASSETS); }

    public void testGoogH265Image()   { swirlTest(googH265(),   MODE_IMAGE); }
    public void testGoogH264Image()   { swirlTest(googH264(),   MODE_IMAGE); }
    public void testGoogH263Image()   { swirlTest(googH263(),   MODE_IMAGE); }
    public void testGoogMpeg4Image()  { swirlTest(googMpeg4(),  MODE_IMAGE); }
    public void testGoogVP8Image()    { swirlTest(googVP8(),    MODE_IMAGE); }
    public void testGoogVP9Image()    { swirlTest(googVP9(),    MODE_IMAGE); }

    public void testOtherH265Image()  { swirlTest(otherH265(),  MODE_IMAGE); }
    public void testOtherH264Image()  { swirlTest(otherH264(),  MODE_IMAGE); }
    public void testOtherH263Image()  { swirlTest(otherH263(),  MODE_IMAGE); }
    public void testOtherMpeg4Image() { swirlTest(otherMpeg4(), MODE_IMAGE); }
    public void testOtherVP8Image()   { swirlTest(otherVP8(),   MODE_IMAGE); }
    public void testOtherVP9Image()   { swirlTest(otherVP9(),   MODE_IMAGE); }

    public void testGoogH265ImageReader()   { swirlTest(googH265(),   MODE_IMAGEREADER); }
    public void testGoogH264ImageReader()   { swirlTest(googH264(),   MODE_IMAGEREADER); }
    public void testGoogH263ImageReader()   { swirlTest(googH263(),   MODE_IMAGEREADER); }
    public void testGoogMpeg4ImageReader()  { swirlTest(googMpeg4(),  MODE_IMAGEREADER); }
    public void testGoogVP8ImageReader()    { swirlTest(googVP8(),    MODE_IMAGEREADER); }
    public void testGoogVP9ImageReader()    { swirlTest(googVP9(),    MODE_IMAGEREADER); }

    public void testOtherH265ImageReader()  { swirlTest(otherH265(),  MODE_IMAGEREADER); }
    public void testOtherH264ImageReader()  { swirlTest(otherH264(),  MODE_IMAGEREADER); }
    public void testOtherH263ImageReader()  { swirlTest(otherH263(),  MODE_IMAGEREADER); }
    public void testOtherMpeg4ImageReader() { swirlTest(otherMpeg4(), MODE_IMAGEREADER); }
    public void testOtherVP8ImageReader()   { swirlTest(otherVP8(),   MODE_IMAGEREADER); }
    public void testOtherVP9ImageReader()   { swirlTest(otherVP9(),   MODE_IMAGEREADER); }

    /**
     * Test ImageReader with 480x360 non-Google (typically HW) AVC decoding using the
     * flexible YUV format.
     */
    public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = other(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    /**
     * Test ImageReader with 480x360 Google (SW) AVC decoding using the flexible YUV format.
     */
    public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = goog(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    private void swirlTest(Decoder[] decoders, int mode) {
        decodeTest(decoders, mode, true /* checkSwirl */);
    }

    private void decodeTest(Decoder[] decoders, int mode, boolean checkSwirl) {
        try {
            boolean skipped = true;
            for (Decoder codec : decoders) {
                // videoDecode() returns true if the codec supported none of the assets.
                if (!codec.videoDecode(mode, checkSwirl)) {
                    skipped = false;
                }
            }
            if (skipped) {
                MediaUtils.skipTest("decoder does not support any of the input files");
            }
        } finally {
            closeImageReader();
        }
    }

    private static class ImageListener implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                mQueue.put(reader.acquireNextImage());
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait, in milliseconds.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out after " + timeout + "ms", image);
            return image;
        }
    }

    /**
     * Decode video frames to image reader.
     */
    private void decodeFramesToImage(
            MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat,
            int width, int height, int imageFormat, int mode, boolean checkSwirl)
            throws InterruptedException {
        ByteBuffer[] decoderInputBuffers;
        ByteBuffer[] decoderOutputBuffers;

        // Configure decoder.
        if (VERBOSE) Log.v(TAG, "stream format: " + mediaFormat);
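        // MODE_IMAGEREADER decodes onto the ImageReader's Surface; MODE_IMAGE decodes to the
        // codec's own buffers and reads each Image back with MediaCodec.getOutputImage().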
        if (mode == MODE_IMAGEREADER) {
            createImageReader(width, height, imageFormat, MAX_NUM_IMAGES, mImageListener);
            decoder.configure(mediaFormat, mReaderSurface, null /* crypto */, 0 /* flags */);
        } else {
            assertEquals(MODE_IMAGE, mode);
            decoder.configure(mediaFormat, null /* surface */, null /* crypto */, 0 /* flags */);
        }

        decoder.start();
        decoderInputBuffers = decoder.getInputBuffers();
        decoderOutputBuffers = decoder.getOutputBuffers();
        extractor.selectTrack(0);

        // Start decoding and get Image, only test the first NUM_FRAME_DECODED frames.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int outputFrameCount = 0;
        while (!sawOutputEOS && outputFrameCount < NUM_FRAME_DECODED) {
            if (VERBOSE) Log.v(TAG, "loop:" + outputFrameCount);
            // Feed input frame.
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = decoderInputBuffers[inputBufIndex];
                    int sampleSize =
                        extractor.readSampleData(dstBuf, 0 /* offset */);

                    if (VERBOSE) Log.v(TAG, "queue an input buffer, idx/size: "
                        + inputBufIndex + "/" + sampleSize);

                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        if (VERBOSE) Log.v(TAG, "saw input EOS.");
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            sampleSize,
                            presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Get output frame
            int res = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (VERBOSE) Log.v(TAG, "got a buffer: " + info.size + "/" + res);
            if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.v(TAG, "no output frame available");
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // decoder output buffers changed, need update.
                if (VERBOSE) Log.v(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned.
                MediaFormat outFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.v(TAG, "decoder output format changed: " + outFormat);
            } else if (res < 0) {
                // Should be a decoding error.
                fail("unexpected result from decoder.dequeueOutputBuffer: " + res);
            } else {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }

                // res >= 0: normal decoding case, copy the output buffer.
                // Will use it as a reference to validate the ImageReader output.
                // Some decoders output a 0-sized buffer at the end. Ignore those.
                boolean doRender = (info.size != 0);

                if (doRender) {
                    outputFrameCount++;
                    String fileName = DEBUG_FILE_NAME_BASE + MediaUtils.getTestName()
                            + (mode == MODE_IMAGE ? "_image_" : "_reader_")
                            + width + "x" + height + "_" + outputFrameCount + ".yuv";

                    Image image = null;
                    try {
                        if (mode == MODE_IMAGE) {
                            image = decoder.getOutputImage(res);
                        } else {
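                            // Render the output buffer to the ImageReader's Surface, then wait
                            // for the corresponding Image to arrive on the listener queue.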
                            decoder.releaseOutputBuffer(res, doRender);
                            res = -1;
                            // Read image and verify
                            image = mImageListener.getImage(WAIT_FOR_IMAGE_TIMEOUT_MS);
                        }
                        validateImage(image, width, height, imageFormat, fileName);

                        if (checkSwirl) {
                            try {
                                validateSwirl(image);
                            } catch (Throwable e) {
                                dumpFile(fileName, getDataFromImage(image));
                                throw e;
                            }
                        }
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                if (res >= 0) {
                    decoder.releaseOutputBuffer(res, false /* render */);
                }
            }
        }
    }

    /**
     * Validate image based on format and size.
     *
     * @param image The image to be validated.
     * @param width The image width.
     * @param height The image height.
     * @param format The image format.
     * @param filePath The debug dump file path; null if no dump to file is wanted.
     */
    public static void validateImage(
            Image image, int width, int height, int format, String filePath) {
        assertNotNull("Input image is invalid", image);

        if (VERBOSE) {
            Plane[] imagePlanes = image.getPlanes();
            Log.v(TAG, "Image " + filePath + " Info:");
            Log.v(TAG, "first plane pixelstride " + imagePlanes[0].getPixelStride());
            Log.v(TAG, "first plane rowstride " + imagePlanes[0].getRowStride());
            Log.v(TAG, "Image timestamp:" + image.getTimestamp());
        }

        assertEquals("Format doesn't match", format, image.getFormat());
        assertEquals("Width doesn't match", width, image.getCropRect().width());
        assertEquals("Height doesn't match", height, image.getCropRect().height());

        if (VERBOSE) Log.v(TAG, "validating Image");
        byte[] data = getDataFromImage(image);
        assertTrue("Invalid image data", data != null && data.length > 0);

        validateYuvData(data, width, height, format, image.getTimestamp());

        if (VERBOSE && filePath != null) {
            dumpFile(filePath, data);
        }
    }

    private static void validateSwirl(Image image) {
        Rect crop = image.getCropRect();
        final int NUM_SIDES = 4;
        final int step = 8;      // the width of the layers
        long[][] rawStats = new long[NUM_SIDES][10];
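        // Expected average YUV color for each of the four sides of the swirl pattern.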
        int[][] colors = new int[][] {
            { 111, 96, 204 }, { 178, 27, 174 }, { 100, 192, 92 }, { 106, 117, 62 }
        };

        // successively accumulate statistics for each layer of the swirl
        // by using overlapping rectangles, and the observation that
        // layer_i = rectangle_i - rectangle_(i+1)
        int lastLayer = 0;
        int layer = 0;
        boolean lastLayerValid = false;
        for (int pos = 0; ; pos += step) {
            Rect area = new Rect(pos - step, pos, crop.width() / 2, crop.height() + 2 * step - pos);
            if (area.isEmpty()) {
                break;
            }
            area.offset(crop.left, crop.top);
            area.intersect(crop);
            for (int lr = 0; lr < 2; ++lr) {
                long[] oneStat = CodecUtils.getRawStats(image, area);
                if (VERBOSE) Log.v(TAG, "area=" + area + ", layer=" + layer + ", last="
                                    + lastLayer + ": " + Arrays.toString(oneStat));
                for (int i = 0; i < oneStat.length; i++) {
                    rawStats[layer][i] += oneStat[i];
                    if (lastLayerValid) {
                        rawStats[lastLayer][i] -= oneStat[i];
                    }
                }
                if (VERBOSE && lastLayerValid) {
                    Log.v(TAG, "layer-" + lastLayer + ": " + Arrays.toString(rawStats[lastLayer]));
                    Log.v(TAG, Arrays.toString(CodecUtils.Raw2YUVStats(rawStats[lastLayer])));
                }
                // switch to the opposite side
                layer ^= 2;      // NUM_SIDES / 2
                lastLayer ^= 2;  // NUM_SIDES / 2
                area.offset(crop.centerX() - area.left, 2 * (crop.centerY() - area.centerY()));
            }

            lastLayer = layer;
            lastLayerValid = true;
            layer = (layer + 1) % NUM_SIDES;
        }

        for (layer = 0; layer < NUM_SIDES; ++layer) {
            float[] stats = CodecUtils.Raw2YUVStats(rawStats[layer]);
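            // stats[0..2] hold the average Y/U/V values; stats[3..5] hold their standard deviations.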
            if (DEBUG) Log.d(TAG, "layer-" + layer + ": " + Arrays.toString(stats));
            if (VERBOSE) Log.v(TAG, Arrays.toString(rawStats[layer]));

            // check layer uniformity
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " is not uniform: "
                        + Arrays.toString(stats),
                        stats[3 + i] < COLOR_STDEV_ALLOWANCE);
            }

            // check layer color
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " mismatches target "
                        + Arrays.toString(colors[layer]) + " vs "
                        + Arrays.toString(Arrays.copyOf(stats, 3)),
                        Math.abs(stats[i] - colors[layer][i]) < COLOR_DELTA_ALLOWANCE);
            }
        }
    }

    private static void validateYuvData(byte[] yuvData, int width, int height, int format,
            long ts) {

        assertTrue("YUV format must be one of YUV_420_888, NV21, or YV12",
                format == ImageFormat.YUV_420_888 ||
                format == ImageFormat.NV21 ||
                format == ImageFormat.YV12);

        if (VERBOSE) Log.v(TAG, "Validating YUV data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("YUV data size doesn't match", expectedSize, yuvData.length);
    }

    private static void checkYuvFormat(int format) {
        if ((format != ImageFormat.YUV_420_888) &&
                (format != ImageFormat.NV21) &&
                (format != ImageFormat.YV12)) {
            fail("Wrong format: " + format);
        }
    }

    /**
     * <p>Check the Android image format validity for an image; only the formats below are
     * supported.</p>
     *
     * <p>Valid formats for a video decoder are YUV_420_888/NV21/YV12.</p>
     */
    private static void checkAndroidImageFormat(Image image) {
        int format = image.getFormat();
        Plane[] planes = image.getPlanes();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
                break;
            default:
                fail("Unsupported Image Format: " + format);
        }
    }

    /**
     * Get a byte array of image data from an Image object.
     * <p>
     * Read data from all planes of an Image into a contiguous, unpadded,
     * unpacked 1-D linear byte array, such that it can be written to disk or
     * conveniently accessed by software. It supports the YUV_420_888/NV21/YV12
     * input Image formats.
     * </p>
     * <p>
     * For YUV_420_888/NV21/YV12, the returned byte array contains the Y plane
     * data first, followed by the U (Cb) and V (Cr) plane data, if present
     * (xstride = width, ystride = height for chroma and luma components).
     * </p>
     */
    private static byte[] getDataFromImage(Image image) {
        assertNotNull("Invalid image:", image);
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        int rowStride, pixelStride;
        byte[] data = null;

        // Read image data
        Plane[] planes = image.getPlanes();
        assertTrue("Fail to get image planes", planes != null && planes.length > 0);

        // Check image validity
        checkAndroidImageFormat(image);

        ByteBuffer buffer = null;

        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        if (VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
        for (int i = 0; i < planes.length; i++) {
            int shift = (i == 0) ? 0 : 1;
            buffer = planes[i].getBuffer();
            assertNotNull("Fail to get bytebuffer from plane", buffer);
            rowStride = planes[i].getRowStride();
            pixelStride = planes[i].getPixelStride();
            assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
            if (VERBOSE) {
                Log.v(TAG, "pixelStride " + pixelStride);
                Log.v(TAG, "rowStride " + rowStride);
                Log.v(TAG, "width " + width);
                Log.v(TAG, "height " + height);
            }
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = crop.width() >> shift;
            int h = crop.height() >> shift;
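            // Seek to the top-left pixel of the crop rectangle within this plane.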
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            assertTrue("rowStride " + rowStride + " should be >= width " + w, rowStride >= w);
            for (int row = 0; row < h; row++) {
                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
                int length;
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    offset += length;
                } else {
                    // Generic case: should work for any pixelStride but slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from a
                    // DirectByteBuffer, which is very bad for performance.
                    length = (w - 1) * pixelStride + bytesPerPixel;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[offset++] = rowData[col * pixelStride];
                    }
                }
                // Advance the buffer by the remainder of the row stride.
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
            if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
        }
        return data;
    }

    private static void dumpFile(String fileName, byte[] data) {
        assertNotNull("fileName must not be null", fileName);
        assertNotNull("data must not be null", data);

        FileOutputStream outStream;
        try {
            Log.v(TAG, "output will be saved as " + fileName);
            outStream = new FileOutputStream(fileName);
        } catch (IOException ioe) {
            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
        }

        try {
            outStream.write(data);
            outStream.close();
        } catch (IOException ioe) {
            throw new RuntimeException("failed writing data to file " + fileName, ioe);
        }
    }

    private void createImageReader(
            int width, int height, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener) {
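        // Only one ImageReader is active at a time; tear down any previous reader first.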
        closeImageReader();

        mReader = ImageReader.newInstance(width, height, format, maxNumImages);
        mReaderSurface = mReader.getSurface();
        mReader.setOnImageAvailableListener(listener, mHandler);
        if (VERBOSE) {
            Log.v(TAG, String.format("Created ImageReader size (%dx%d), format %d", width, height,
                    format));
        }
    }

    /**
     * Close the pending images, then close the currently active {@link ImageReader} object.
     */
    private void closeImageReader() {
        if (mReader != null) {
            try {
                // Close all possible pending images first.
                Image image = mReader.acquireLatestImage();
                if (image != null) {
                    image.close();
                }
            } finally {
                mReader.close();
                mReader = null;
            }
        }
    }
}