/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.media.cts.R;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.content.res.Resources.NotFoundException;
import android.graphics.Rect;
import android.graphics.ImageFormat;
import android.media.cts.CodecUtils;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;

import com.android.compatibility.common.util.MediaUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Basic test for ImageReader APIs.
 * <p>
 * It uses MediaCodec to decode a short video stream, sends the video frames to
 * the surface provided by ImageReader, and then compares whether the output
 * buffers of the ImageReader match the output buffers of the MediaCodec. The
 * swirl clips cover several compression formats (H.263, MPEG-4, AVC, HEVC, VP8
 * and VP9), although the compression format itself doesn't matter for this test.
 * Both hardware (non-Google) and software (Google) decoders are tested.
 * </p>
 */
public class ImageReaderDecoderTest extends AndroidTestCase {
    private static final String TAG = "ImageReaderDecoderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    private static final long DEFAULT_TIMEOUT_US = 10000;
    private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 1000;
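    // Debug YUV dumps (e.g. frames that fail the swirl check) are written under this path.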
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/";
    private static final int NUM_FRAME_DECODED = 100;
    // video decoders only support a single outstanding image with the consumer
    private static final int MAX_NUM_IMAGES = 1;
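    // Allowed per-channel standard deviation within a swirl layer, and allowed per-channel
    // delta from the expected layer color, used by validateSwirl().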
    private static final float COLOR_STDEV_ALLOWANCE = 5f;
    private static final float COLOR_DELTA_ALLOWANCE = 5f;

    private final static int MODE_IMAGEREADER = 0;
    private final static int MODE_IMAGE       = 1;

    private Resources mResources;
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private ImageReader mReader;
    private Surface mReaderSurface;
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private ImageListener mImageListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mHandlerThread = new HandlerThread(TAG);
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mImageListener = new ImageListener();
    }

    @Override
    protected void tearDown() throws Exception {
        mHandlerThread.quitSafely();
        mHandler = null;
    }

    static class MediaAsset {
        public MediaAsset(int resource, int width, int height) {
            mResource = resource;
            mWidth = width;
            mHeight = height;
        }

        public int getWidth() {
            return mWidth;
        }

        public int getHeight() {
            return mHeight;
        }

        public int getResource() {
            return mResource;
        }

        private final int mResource;
        private final int mWidth;
        private final int mHeight;
    }

    static class MediaAssets {
        public MediaAssets(String mime, MediaAsset... assets) {
            mMime = mime;
            mAssets = assets;
        }

        public String getMime() {
            return mMime;
        }

        public MediaAsset[] getAssets() {
            return mAssets;
        }

        private final String mMime;
        private final MediaAsset[] mAssets;
    }

    private static MediaAssets H263_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_H263,
            new MediaAsset(R.raw.swirl_176x144_h263, 176, 144),
            new MediaAsset(R.raw.swirl_352x288_h263, 352, 288),
            new MediaAsset(R.raw.swirl_128x96_h263, 128, 96));

    private static MediaAssets MPEG4_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_MPEG4,
            new MediaAsset(R.raw.swirl_128x128_mpeg4, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_mpeg4, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_mpeg4, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_mpeg4, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_mpeg4, 130, 132));

    private static MediaAssets H264_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AVC,
            new MediaAsset(R.raw.swirl_128x128_h264, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h264, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h264, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h264, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h264, 130, 132));

    private static MediaAssets H265_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_HEVC,
            new MediaAsset(R.raw.swirl_128x128_h265, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h265, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h265, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h265, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h265, 130, 132));

    private static MediaAssets VP8_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP8,
            new MediaAsset(R.raw.swirl_128x128_vp8, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp8, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp8, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp8, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp8, 130, 132));

    private static MediaAssets VP9_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP9,
            new MediaAsset(R.raw.swirl_128x128_vp9, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp9, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp9, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp9, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp9, 130, 132));

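    // Frame rate used for the codec size/rate support check on the swirl clips.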
    static final float SWIRL_FPS = 12.f;

    class Decoder {
        final private String mName;
        final private String mMime;
        final private VideoCapabilities mCaps;
        final private ArrayList<MediaAsset> mAssets;

        boolean isFlexibleFormatSupported(CodecCapabilities caps) {
            for (int c : caps.colorFormats) {
                if (c == COLOR_FormatYUV420Flexible) {
                    return true;
                }
            }
            return false;
        }

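        // Keep only the assets whose size and frame rate this codec supports, and only if
        // the codec advertises flexible YUV420 output.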
        Decoder(String name, MediaAssets assets, CodecCapabilities caps) {
            mName = name;
            mMime = assets.getMime();
            mCaps = caps.getVideoCapabilities();
            mAssets = new ArrayList<MediaAsset>();

            for (MediaAsset asset : assets.getAssets()) {
                if (mCaps.areSizeAndRateSupported(asset.getWidth(), asset.getHeight(), SWIRL_FPS)
                        && isFlexibleFormatSupported(caps)) {
                    mAssets.add(asset);
                }
            }
        }

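        // Decodes every supported asset; returns true if all assets were skipped
        // (i.e. nothing was decoded), false otherwise.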
        public boolean videoDecode(int mode, boolean checkSwirl) {
            boolean skipped = true;
            for (MediaAsset asset: mAssets) {
                // TODO: loop over all supported image formats
                int imageFormat = ImageFormat.YUV_420_888;
                int colorFormat = COLOR_FormatYUV420Flexible;
                videoDecode(asset, imageFormat, colorFormat, mode, checkSwirl);
                skipped = false;
            }
            return skipped;
        }

        private void videoDecode(
                MediaAsset asset, int imageFormat, int colorFormat, int mode, boolean checkSwirl) {
            int video = asset.getResource();
            int width = asset.getWidth();
            int height = asset.getHeight();

            if (DEBUG) Log.d(TAG, "videoDecode " + mName + " " + width + "x" + height);

            MediaCodec decoder = null;
            AssetFileDescriptor vidFD = null;

            MediaExtractor extractor = null;
            File tmpFile = null;
            InputStream is = null;
            FileOutputStream os = null;
            MediaFormat mediaFormat = null;
            try {
                extractor = new MediaExtractor();

                try {
                    vidFD = mResources.openRawResourceFd(video);
                    extractor.setDataSource(
                            vidFD.getFileDescriptor(), vidFD.getStartOffset(), vidFD.getLength());
                } catch (NotFoundException e) {
                    // resource is compressed, uncompress locally
                    String tmpName = "tempStream";
                    tmpFile = File.createTempFile(tmpName, null, mContext.getCacheDir());
                    is = mResources.openRawResource(video);
                    os = new FileOutputStream(tmpFile);
                    byte[] buf = new byte[1024];
                    int len;
                    while ((len = is.read(buf, 0, buf.length)) > 0) {
                        os.write(buf, 0, len);
                    }
                    os.close();
                    is.close();

                    extractor.setDataSource(tmpFile.getAbsolutePath());
                }

                mediaFormat = extractor.getTrackFormat(0);
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);

                // Create decoder
                decoder = MediaCodec.createByCodecName(mName);
                assertNotNull("couldn't create decoder " + mName, decoder);

                decodeFramesToImage(
                        decoder, extractor, mediaFormat,
                        width, height, imageFormat, mode, checkSwirl);

                decoder.stop();
                if (vidFD != null) {
                    vidFD.close();
                }
            } catch (Throwable e) {
                throw new RuntimeException("while " + mName + " decoding "
                        + mResources.getResourceEntryName(video) + ": " + mediaFormat, e);
            } finally {
                if (decoder != null) {
                    decoder.release();
                }
                if (extractor != null) {
                    extractor.release();
                }
                if (tmpFile != null) {
                    tmpFile.delete();
                }
            }
        }
    }

    private Decoder[] decoders(MediaAssets assets, boolean goog) {
        String mime = assets.getMime();
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        ArrayList<Decoder> result = new ArrayList<Decoder>();

        for (MediaCodecInfo info : mcl.getCodecInfos()) {
            if (info.isEncoder() || MediaUtils.isGoogle(info.getName()) != goog) {
                continue;
            }
            CodecCapabilities caps = null;
            try {
                caps = info.getCapabilitiesForType(mime);
            } catch (IllegalArgumentException e) { // mime is not supported
                continue;
            }
            assertNotNull(info.getName() + " capabilities for " + mime + " returned null", caps);
            result.add(new Decoder(info.getName(), assets, caps));
        }
        return result.toArray(new Decoder[result.size()]);
    }

    private Decoder[] goog(MediaAssets assets) {
        return decoders(assets, true /* goog */);
    }

    private Decoder[] other(MediaAssets assets) {
        return decoders(assets, false /* goog */);
    }

    private Decoder[] googH265()  { return goog(H265_ASSETS); }
    private Decoder[] googH264()  { return goog(H264_ASSETS); }
    private Decoder[] googH263()  { return goog(H263_ASSETS); }
    private Decoder[] googMpeg4() { return goog(MPEG4_ASSETS); }
    private Decoder[] googVP8()   { return goog(VP8_ASSETS); }
    private Decoder[] googVP9()   { return goog(VP9_ASSETS); }

    private Decoder[] otherH265()  { return other(H265_ASSETS); }
    private Decoder[] otherH264()  { return other(H264_ASSETS); }
    private Decoder[] otherH263()  { return other(H263_ASSETS); }
    private Decoder[] otherMpeg4() { return other(MPEG4_ASSETS); }
    private Decoder[] otherVP8()   { return other(VP8_ASSETS); }
    private Decoder[] otherVP9()   { return other(VP9_ASSETS); }

    public void testGoogH265Image()   { swirlTest(googH265(),   MODE_IMAGE); }
    public void testGoogH264Image()   { swirlTest(googH264(),   MODE_IMAGE); }
    public void testGoogH263Image()   { swirlTest(googH263(),   MODE_IMAGE); }
    public void testGoogMpeg4Image()  { swirlTest(googMpeg4(),  MODE_IMAGE); }
    public void testGoogVP8Image()    { swirlTest(googVP8(),    MODE_IMAGE); }
    public void testGoogVP9Image()    { swirlTest(googVP9(),    MODE_IMAGE); }

    public void testOtherH265Image()  { swirlTest(otherH265(),  MODE_IMAGE); }
    public void testOtherH264Image()  { swirlTest(otherH264(),  MODE_IMAGE); }
    public void testOtherH263Image()  { swirlTest(otherH263(),  MODE_IMAGE); }
    public void testOtherMpeg4Image() { swirlTest(otherMpeg4(), MODE_IMAGE); }
    public void testOtherVP8Image()   { swirlTest(otherVP8(),   MODE_IMAGE); }
    public void testOtherVP9Image()   { swirlTest(otherVP9(),   MODE_IMAGE); }

    public void testGoogH265ImageReader()   { swirlTest(googH265(),   MODE_IMAGEREADER); }
    public void testGoogH264ImageReader()   { swirlTest(googH264(),   MODE_IMAGEREADER); }
    public void testGoogH263ImageReader()   { swirlTest(googH263(),   MODE_IMAGEREADER); }
    public void testGoogMpeg4ImageReader()  { swirlTest(googMpeg4(),  MODE_IMAGEREADER); }
    public void testGoogVP8ImageReader()    { swirlTest(googVP8(),    MODE_IMAGEREADER); }
    public void testGoogVP9ImageReader()    { swirlTest(googVP9(),    MODE_IMAGEREADER); }

    public void testOtherH265ImageReader()  { swirlTest(otherH265(),  MODE_IMAGEREADER); }
    public void testOtherH264ImageReader()  { swirlTest(otherH264(),  MODE_IMAGEREADER); }
    public void testOtherH263ImageReader()  { swirlTest(otherH263(),  MODE_IMAGEREADER); }
    public void testOtherMpeg4ImageReader() { swirlTest(otherMpeg4(), MODE_IMAGEREADER); }
    public void testOtherVP8ImageReader()   { swirlTest(otherVP8(),   MODE_IMAGEREADER); }
    public void testOtherVP9ImageReader()   { swirlTest(otherVP9(),   MODE_IMAGEREADER); }

    /**
     * Test ImageReader with 480x360 non-Google (HW) AVC decoding for flexible YUV format.
     */
    public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = other(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    /**
     * Test ImageReader with 480x360 Google (SW) AVC decoding for flexible YUV format.
     */
    public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = goog(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    private void swirlTest(Decoder[] decoders, int mode) {
        decodeTest(decoders, mode, true /* checkSwirl */);
    }

    private void decodeTest(Decoder[] decoders, int mode, boolean checkSwirl) {
        try {
            boolean skipped = true;
            for (Decoder codec : decoders) {
                // videoDecode() returns true when the codec skipped all of its assets.
                if (!codec.videoDecode(mode, checkSwirl)) {
                    skipped = false;
                }
            }
            if (skipped) {
                MediaUtils.skipTest("decoder does not support any of the input files");
            }
        } finally {
            closeImageReader();
        }
    }

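    /** Queues images delivered by the ImageReader so the decode loop can consume them. */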
    private static class ImageListener implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                mQueue.put(reader.acquireNextImage());
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait, in milliseconds.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out after " + timeout + "ms", image);
            return image;
        }
    }

    /**
     * Decode video frames to image reader.
     */
    private void decodeFramesToImage(
            MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat,
            int width, int height, int imageFormat, int mode, boolean checkSwirl)
            throws InterruptedException {
        ByteBuffer[] decoderInputBuffers;
        ByteBuffer[] decoderOutputBuffers;

        // Configure decoder.
        if (VERBOSE) Log.v(TAG, "stream format: " + mediaFormat);
        if (mode == MODE_IMAGEREADER) {
            createImageReader(width, height, imageFormat, MAX_NUM_IMAGES, mImageListener);
            decoder.configure(mediaFormat, mReaderSurface, null /* crypto */, 0 /* flags */);
        } else {
            assertEquals(MODE_IMAGE, mode);
            decoder.configure(mediaFormat, null /* surface */, null /* crypto */, 0 /* flags */);
        }

        decoder.start();
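        // This test uses the pre-API-21 buffer-array API; the output buffer array is
        // refreshed below when INFO_OUTPUT_BUFFERS_CHANGED is reported.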
        decoderInputBuffers = decoder.getInputBuffers();
        decoderOutputBuffers = decoder.getOutputBuffers();
        extractor.selectTrack(0);

        // Start decoding and get Images; only test the first NUM_FRAME_DECODED frames.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int outputFrameCount = 0;
        while (!sawOutputEOS && outputFrameCount < NUM_FRAME_DECODED) {
            if (VERBOSE) Log.v(TAG, "loop:" + outputFrameCount);
            // Feed input frame.
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = decoderInputBuffers[inputBufIndex];
                    int sampleSize =
                        extractor.readSampleData(dstBuf, 0 /* offset */);

                    if (VERBOSE) Log.v(TAG, "queue an input buffer, idx/size: "
                        + inputBufIndex + "/" + sampleSize);

                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        if (VERBOSE) Log.v(TAG, "saw input EOS.");
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            sampleSize,
                            presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Get output frame
            int res = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (VERBOSE) Log.v(TAG, "got a buffer: " + info.size + "/" + res);
            if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.v(TAG, "no output frame available");
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // decoder output buffers changed, need update.
                if (VERBOSE) Log.v(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned.
                MediaFormat outFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.v(TAG, "decoder output format changed: " + outFormat);
            } else if (res < 0) {
                // Should be a decoding error.
                fail("unexpected result from decoder.dequeueOutputBuffer: " + res);
            } else {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }

                // res >= 0: normal decoding case, copy the output buffer.
                // Will use it as a reference to validate the ImageReader output.
                // Some decoders output a 0-sized buffer at the end. Ignore those.
                boolean doRender = (info.size != 0);

                if (doRender) {
                    outputFrameCount++;
                    String fileName = DEBUG_FILE_NAME_BASE + MediaUtils.getTestName()
                            + (mode == MODE_IMAGE ? "_image_" : "_reader_")
                            + width + "x" + height + "_" + outputFrameCount + ".yuv";

                    Image image = null;
                    try {
                        if (mode == MODE_IMAGE) {
                            image = decoder.getOutputImage(res);
                        } else {
                            decoder.releaseOutputBuffer(res, doRender);
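                            // The frame was rendered to the ImageReader surface; clear res so
                            // the buffer is not released a second time after validation below.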
                            res = -1;
                            // Read image and verify
                            image = mImageListener.getImage(WAIT_FOR_IMAGE_TIMEOUT_MS);
                        }
                        validateImage(image, width, height, imageFormat, fileName);

                        if (checkSwirl) {
                            try {
                                validateSwirl(image);
                            } catch (Throwable e) {
                                dumpFile(fileName, getDataFromImage(image));
                                throw e;
                            }
                        }
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                if (res >= 0) {
                    decoder.releaseOutputBuffer(res, false /* render */);
                }
            }
        }
    }

    /**
     * Validate image based on format and size.
     *
     * @param image The image to be validated.
     * @param width The image width.
     * @param height The image height.
     * @param format The image format.
     * @param filePath The debug dump file path; null if no file dump is wanted.
     */
    public static void validateImage(
            Image image, int width, int height, int format, String filePath) {
        assertNotNull("Input image is invalid", image);

        if (VERBOSE) {
            Plane[] imagePlanes = image.getPlanes();
            Log.v(TAG, "Image " + filePath + " Info:");
            Log.v(TAG, "first plane pixelstride " + imagePlanes[0].getPixelStride());
            Log.v(TAG, "first plane rowstride " + imagePlanes[0].getRowStride());
            Log.v(TAG, "Image timestamp:" + image.getTimestamp());
        }

        assertEquals("Format doesn't match", format, image.getFormat());
        assertEquals("Width doesn't match", width, image.getCropRect().width());
        assertEquals("Height doesn't match", height, image.getCropRect().height());

        if (VERBOSE) Log.v(TAG, "validating Image");
        byte[] data = getDataFromImage(image);
        assertTrue("Invalid image data", data != null && data.length > 0);

        validateYuvData(data, width, height, format, image.getTimestamp());

        if (VERBOSE && filePath != null) {
            dumpFile(filePath, data);
        }
    }

    private static void validateSwirl(Image image) {
        Rect crop = image.getCropRect();
        final int NUM_SIDES = 4;
        final int step = 8;      // the width of the layers
        long[][] rawStats = new long[NUM_SIDES][10];
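        // Expected target color of each of the four swirl layers, compared against the
        // per-layer stats computed below via CodecUtils.Raw2YUVStats().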
        int[][] colors = new int[][] {
            { 111, 96, 204 }, { 178, 27, 174 }, { 100, 192, 92 }, { 106, 117, 62 }
        };

        // successively accumulate statistics for each layer of the swirl
        // by using overlapping rectangles, and the observation that
        // layer_i = rectangle_i - rectangle_(i+1)
        int lastLayer = 0;
        int layer = 0;
        boolean lastLayerValid = false;
        for (int pos = 0; ; pos += step) {
            Rect area = new Rect(pos - step, pos, crop.width() / 2, crop.height() + 2 * step - pos);
            if (area.isEmpty()) {
                break;
            }
            area.offset(crop.left, crop.top);
            area.intersect(crop);
            for (int lr = 0; lr < 2; ++lr) {
                long[] oneStat = CodecUtils.getRawStats(image, area);
                if (VERBOSE) Log.v(TAG, "area=" + area + ", layer=" + layer + ", last="
                                    + lastLayer + ": " + Arrays.toString(oneStat));
                for (int i = 0; i < oneStat.length; i++) {
                    rawStats[layer][i] += oneStat[i];
                    if (lastLayerValid) {
                        rawStats[lastLayer][i] -= oneStat[i];
                    }
                }
                if (VERBOSE && lastLayerValid) {
                    Log.v(TAG, "layer-" + lastLayer + ": " + Arrays.toString(rawStats[lastLayer]));
                    Log.v(TAG, Arrays.toString(CodecUtils.Raw2YUVStats(rawStats[lastLayer])));
                }
                // switch to the opposite side
                layer ^= 2;      // NUM_SIDES / 2
                lastLayer ^= 2;  // NUM_SIDES / 2
                area.offset(crop.centerX() - area.left, 2 * (crop.centerY() - area.centerY()));
            }

            lastLayer = layer;
            lastLayerValid = true;
            layer = (layer + 1) % NUM_SIDES;
        }

        for (layer = 0; layer < NUM_SIDES; ++layer) {
            float[] stats = CodecUtils.Raw2YUVStats(rawStats[layer]);
            if (DEBUG) Log.d(TAG, "layer-" + layer + ": " + Arrays.toString(stats));
            if (VERBOSE) Log.v(TAG, Arrays.toString(rawStats[layer]));

            // check layer uniformity
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " is not uniform: "
                        + Arrays.toString(stats),
                        stats[3 + i] < COLOR_STDEV_ALLOWANCE);
            }

            // check layer color
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " mismatches target "
                        + Arrays.toString(colors[layer]) + " vs "
                        + Arrays.toString(Arrays.copyOf(stats, 3)),
                        Math.abs(stats[i] - colors[layer][i]) < COLOR_DELTA_ALLOWANCE);
            }
        }
    }

    private static void validateYuvData(byte[] yuvData, int width, int height, int format,
            long ts) {

        assertTrue("YUV format must be one of YUV_420_888, NV21, or YV12",
                format == ImageFormat.YUV_420_888 ||
                format == ImageFormat.NV21 ||
                format == ImageFormat.YV12);

        if (VERBOSE) Log.v(TAG, "Validating YUV data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("YUV data size doesn't match", expectedSize, yuvData.length);
    }

    private static void checkYuvFormat(int format) {
        if ((format != ImageFormat.YUV_420_888) &&
                (format != ImageFormat.NV21) &&
                (format != ImageFormat.YV12)) {
            fail("Wrong format: " + format);
        }
    }

    /**
     * <p>Check Android image format validity for an image; only the formats below are
     * supported.</p>
     *
     * <p>Valid formats are YUV_420_888/NV21/YV12 for a video decoder.</p>
     */
    private static void checkAndroidImageFormat(Image image) {
        int format = image.getFormat();
        Plane[] planes = image.getPlanes();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
                break;
            default:
                fail("Unsupported Image Format: " + format);
        }
    }

    /**
     * Get a byte array of image data from an Image object.
     * <p>
     * Read data from all planes of an Image into a contiguous, unpadded,
     * unpacked 1-D linear byte array, such that it can be written to disk or
     * accessed conveniently by software. It supports the YUV_420_888/NV21/YV12
     * input Image formats.
     * </p>
     * <p>
     * For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
     * the Y plane data first, followed by the U (Cb) and V (Cr) planes, if present
     * (xstride = width, ystride = height for chroma and luma components).
     * </p>
     */
    private static byte[] getDataFromImage(Image image) {
        assertNotNull("Invalid image:", image);
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        int rowStride, pixelStride;
        byte[] data = null;

        // Read image data
        Plane[] planes = image.getPlanes();
        assertTrue("Fail to get image planes", planes != null && planes.length > 0);

        // Check image validity
        checkAndroidImageFormat(image);

        ByteBuffer buffer = null;

        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        if (VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
        for (int i = 0; i < planes.length; i++) {
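            // Plane 0 (luma) is full resolution; planes 1 and 2 (chroma) are subsampled
            // by 2 in each dimension, hence the shift applied to the crop size below.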
            int shift = (i == 0) ? 0 : 1;
            buffer = planes[i].getBuffer();
            assertNotNull("Fail to get bytebuffer from plane", buffer);
            rowStride = planes[i].getRowStride();
            pixelStride = planes[i].getPixelStride();
            assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
            if (VERBOSE) {
                Log.v(TAG, "pixelStride " + pixelStride);
                Log.v(TAG, "rowStride " + rowStride);
                Log.v(TAG, "width " + width);
                Log.v(TAG, "height " + height);
            }
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = crop.width() >> shift;
            int h = crop.height() >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            assertTrue("rowStride " + rowStride + " should be >= width " + w, rowStride >= w);
            for (int row = 0; row < h; row++) {
                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
                int length;
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    offset += length;
                } else {
                    // Generic case: should work for any pixelStride but slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from a
                    // DirectByteBuffer, which is very bad for performance.
                    length = (w - 1) * pixelStride + bytesPerPixel;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[offset++] = rowData[col * pixelStride];
                    }
                }
                // Advance the buffer by the remainder of the row stride
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
            if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
        }
        return data;
    }

    private static void dumpFile(String fileName, byte[] data) {
        assertNotNull("fileName must not be null", fileName);
        assertNotNull("data must not be null", data);

        FileOutputStream outStream;
        try {
            Log.v(TAG, "output will be saved as " + fileName);
            outStream = new FileOutputStream(fileName);
        } catch (IOException ioe) {
            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
        }

        try {
            outStream.write(data);
            outStream.close();
        } catch (IOException ioe) {
            throw new RuntimeException("failed writing data to file " + fileName, ioe);
        }
    }

    private void createImageReader(
            int width, int height, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener)  {
        closeImageReader();

        mReader = ImageReader.newInstance(width, height, format, maxNumImages);
        mReaderSurface = mReader.getSurface();
        mReader.setOnImageAvailableListener(listener, mHandler);
        if (VERBOSE) {
            Log.v(TAG, String.format("Created ImageReader size (%dx%d), format %d", width, height,
                    format));
        }
    }

    /**
     * Close any pending images, then close the current active {@link ImageReader} object.
     */
    private void closeImageReader() {
        if (mReader != null) {
            try {
                // Close all possible pending images first.
                Image image = mReader.acquireLatestImage();
                if (image != null) {
                    image.close();
                }
            } finally {
                mReader.close();
                mReader = null;
            }
        }
    }
}