/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import com.android.cts.media.R;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.content.res.Resources.NotFoundException;
import android.cts.util.MediaUtils;
import android.graphics.Rect;
import android.graphics.ImageFormat;
import android.media.cts.CodecUtils;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Basic test for ImageReader APIs.
 * <p>
 * It uses MediaCodec to decode short video streams, sending the video frames
 * either to a surface provided by ImageReader or reading them back directly as
 * decoder output Images. The decoded frames are then validated for format,
 * size, and (for the swirl clips) expected color content. Multiple compression
 * formats are exercised, and both hardware and software decoders are tested.
 * </p>
 */
public class ImageReaderDecoderTest extends AndroidTestCase {
    private static final String TAG = "ImageReaderDecoderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    private static final long DEFAULT_TIMEOUT_US = 10000;
    private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 1000;
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/";
    private static final int NUM_FRAME_DECODED = 100;
    // video decoders only support a single outstanding image with the consumer
    private static final int MAX_NUM_IMAGES = 1;
    private static final float COLOR_STDEV_ALLOWANCE = 5f;
    private static final float COLOR_DELTA_ALLOWANCE = 5f;

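    // MODE_IMAGEREADER renders decoder output to an ImageReader surface and reads the frames
    // back through its OnImageAvailableListener; MODE_IMAGE reads Images directly from the
    // decoder via MediaCodec.getOutputImage() without any surface.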
    private final static int MODE_IMAGEREADER = 0;
    private final static int MODE_IMAGE       = 1;

    private Resources mResources;
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private ImageReader mReader;
    private Surface mReaderSurface;
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private ImageListener mImageListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mHandlerThread = new HandlerThread(TAG);
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mImageListener = new ImageListener();
    }

    @Override
    protected void tearDown() throws Exception {
        mHandlerThread.quitSafely();
        mHandler = null;
    }

    static class MediaAsset {
        public MediaAsset(int resource, int width, int height) {
            mResource = resource;
            mWidth = width;
            mHeight = height;
        }

        public int getWidth() {
            return mWidth;
        }

        public int getHeight() {
            return mHeight;
        }

        public int getResource() {
            return mResource;
        }

        private final int mResource;
        private final int mWidth;
        private final int mHeight;
    }

    static class MediaAssets {
        public MediaAssets(String mime, MediaAsset... assets) {
            mMime = mime;
            mAssets = assets;
        }

        public String getMime() {
            return mMime;
        }

        public MediaAsset[] getAssets() {
            return mAssets;
        }

        private final String mMime;
        private final MediaAsset[] mAssets;
    }

    private static MediaAssets H263_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_H263,
            new MediaAsset(R.raw.swirl_176x144_h263, 176, 144),
            new MediaAsset(R.raw.swirl_352x288_h263, 352, 288),
            new MediaAsset(R.raw.swirl_128x96_h263, 128, 96));

    private static MediaAssets MPEG4_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_MPEG4,
            new MediaAsset(R.raw.swirl_128x128_mpeg4, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_mpeg4, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_mpeg4, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_mpeg4, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_mpeg4, 130, 132));

    private static MediaAssets H264_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AVC,
            new MediaAsset(R.raw.swirl_128x128_h264, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h264, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h264, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h264, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h264, 130, 132));

    private static MediaAssets H265_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_HEVC,
            new MediaAsset(R.raw.swirl_128x128_h265, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_h265, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_h265, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_h265, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_h265, 130, 132));

    private static MediaAssets VP8_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP8,
            new MediaAsset(R.raw.swirl_128x128_vp8, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp8, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp8, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp8, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp8, 130, 132));

    private static MediaAssets VP9_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP9,
            new MediaAsset(R.raw.swirl_128x128_vp9, 128, 128),
            new MediaAsset(R.raw.swirl_144x136_vp9, 144, 136),
            new MediaAsset(R.raw.swirl_136x144_vp9, 136, 144),
            new MediaAsset(R.raw.swirl_132x130_vp9, 132, 130),
            new MediaAsset(R.raw.swirl_130x132_vp9, 130, 132));

    static final float SWIRL_FPS = 12.f;

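    // Wraps a single decoder (by codec name) together with the subset of the test clips whose
    // size and frame rate it supports; only decoders advertising COLOR_FormatYUV420Flexible
    // are exercised.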
    class Decoder {
        final private String mName;
        final private String mMime;
        final private VideoCapabilities mCaps;
        final private ArrayList<MediaAsset> mAssets;

        boolean isFlexibleFormatSupported(CodecCapabilities caps) {
            for (int c : caps.colorFormats) {
                if (c == COLOR_FormatYUV420Flexible) {
                    return true;
                }
            }
            return false;
        }

        Decoder(String name, MediaAssets assets, CodecCapabilities caps) {
            mName = name;
            mMime = assets.getMime();
            mCaps = caps.getVideoCapabilities();
            mAssets = new ArrayList<MediaAsset>();

            for (MediaAsset asset : assets.getAssets()) {
                if (mCaps.areSizeAndRateSupported(asset.getWidth(), asset.getHeight(), SWIRL_FPS)
                        && isFlexibleFormatSupported(caps)) {
                    mAssets.add(asset);
                }
            }
        }

        // Returns true if this decoder skipped all of its clips (nothing was decoded).
        public boolean videoDecode(int mode, boolean checkSwirl) {
            boolean skipped = true;
            for (MediaAsset asset: mAssets) {
                // TODO: loop over all supported image formats
                int imageFormat = ImageFormat.YUV_420_888;
                int colorFormat = COLOR_FormatYUV420Flexible;
                videoDecode(asset, imageFormat, colorFormat, mode, checkSwirl);
                skipped = false;
            }
            return skipped;
        }

        private void videoDecode(
                MediaAsset asset, int imageFormat, int colorFormat, int mode, boolean checkSwirl) {
            int video = asset.getResource();
            int width = asset.getWidth();
            int height = asset.getHeight();

            if (DEBUG) Log.d(TAG, "videoDecode " + mName + " " + width + "x" + height);

            MediaCodec decoder = null;
            AssetFileDescriptor vidFD = null;

            MediaExtractor extractor = null;
            File tmpFile = null;
            InputStream is = null;
            FileOutputStream os = null;
            MediaFormat mediaFormat = null;
            try {
                extractor = new MediaExtractor();

                try {
                    vidFD = mResources.openRawResourceFd(video);
                    extractor.setDataSource(
                            vidFD.getFileDescriptor(), vidFD.getStartOffset(), vidFD.getLength());
                } catch (NotFoundException e) {
                    // resource is compressed, uncompress locally
                    String tmpName = "tempStream";
                    tmpFile = File.createTempFile(tmpName, null, mContext.getCacheDir());
                    is = mResources.openRawResource(video);
                    os = new FileOutputStream(tmpFile);
                    byte[] buf = new byte[1024];
                    int len;
                    while ((len = is.read(buf, 0, buf.length)) > 0) {
                        os.write(buf, 0, len);
                    }
                    os.close();
                    is.close();

                    extractor.setDataSource(tmpFile.getAbsolutePath());
                }

                mediaFormat = extractor.getTrackFormat(0);
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);

                // Create decoder
                decoder = MediaCodec.createByCodecName(mName);
                assertNotNull("couldn't create decoder " + mName, decoder);

                decodeFramesToImage(
                        decoder, extractor, mediaFormat,
                        width, height, imageFormat, mode, checkSwirl);

                decoder.stop();
                if (vidFD != null) {
                    vidFD.close();
                }
            } catch (Throwable e) {
                throw new RuntimeException("while " + mName + " decoding "
                        + mResources.getResourceEntryName(video) + ": " + mediaFormat, e);
            } finally {
                if (decoder != null) {
                    decoder.release();
                }
                if (extractor != null) {
                    extractor.release();
                }
                if (tmpFile != null) {
                    tmpFile.delete();
                }
            }
        }
    }

    private Decoder[] decoders(MediaAssets assets, boolean goog) {
        String mime = assets.getMime();
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        ArrayList<Decoder> result = new ArrayList<Decoder>();

        for (MediaCodecInfo info : mcl.getCodecInfos()) {
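            // Skip encoders, and split codecs into Google software decoders ("omx.google."
            // name prefix) vs. all other decoders based on the goog flag.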
            if (info.isEncoder()
                    || info.getName().toLowerCase().startsWith("omx.google.") != goog) {
                continue;
            }
            CodecCapabilities caps = null;
            try {
                caps = info.getCapabilitiesForType(mime);
            } catch (IllegalArgumentException e) { // mime is not supported
                continue;
            }
            assertNotNull(info.getName() + " capabilities for " + mime + " returned null", caps);
            result.add(new Decoder(info.getName(), assets, caps));
        }
        return result.toArray(new Decoder[result.size()]);
    }

    private Decoder[] goog(MediaAssets assets) {
        return decoders(assets, true /* goog */);
    }

    private Decoder[] other(MediaAssets assets) {
        return decoders(assets, false /* goog */);
    }

    private Decoder[] googH265()  { return goog(H265_ASSETS); }
    private Decoder[] googH264()  { return goog(H264_ASSETS); }
    private Decoder[] googH263()  { return goog(H263_ASSETS); }
    private Decoder[] googMpeg4() { return goog(MPEG4_ASSETS); }
    private Decoder[] googVP8()   { return goog(VP8_ASSETS); }
    private Decoder[] googVP9()   { return goog(VP9_ASSETS); }

    private Decoder[] otherH265()  { return other(H265_ASSETS); }
    private Decoder[] otherH264()  { return other(H264_ASSETS); }
    private Decoder[] otherH263()  { return other(H263_ASSETS); }
    private Decoder[] otherMpeg4() { return other(MPEG4_ASSETS); }
    private Decoder[] otherVP8()   { return other(VP8_ASSETS); }
    private Decoder[] otherVP9()   { return other(VP9_ASSETS); }

    public void testGoogH265Image()   { swirlTest(googH265(),   MODE_IMAGE); }
    public void testGoogH264Image()   { swirlTest(googH264(),   MODE_IMAGE); }
    public void testGoogH263Image()   { swirlTest(googH263(),   MODE_IMAGE); }
    public void testGoogMpeg4Image()  { swirlTest(googMpeg4(),  MODE_IMAGE); }
    public void testGoogVP8Image()    { swirlTest(googVP8(),    MODE_IMAGE); }
    public void testGoogVP9Image()    { swirlTest(googVP9(),    MODE_IMAGE); }

    public void testOtherH265Image()  { swirlTest(otherH265(),  MODE_IMAGE); }
    public void testOtherH264Image()  { swirlTest(otherH264(),  MODE_IMAGE); }
    public void testOtherH263Image()  { swirlTest(otherH263(),  MODE_IMAGE); }
    public void testOtherMpeg4Image() { swirlTest(otherMpeg4(), MODE_IMAGE); }
    public void testOtherVP8Image()   { swirlTest(otherVP8(),   MODE_IMAGE); }
    public void testOtherVP9Image()   { swirlTest(otherVP9(),   MODE_IMAGE); }

    public void testGoogH265ImageReader()   { swirlTest(googH265(),   MODE_IMAGEREADER); }
    public void testGoogH264ImageReader()   { swirlTest(googH264(),   MODE_IMAGEREADER); }
    public void testGoogH263ImageReader()   { swirlTest(googH263(),   MODE_IMAGEREADER); }
    public void testGoogMpeg4ImageReader()  { swirlTest(googMpeg4(),  MODE_IMAGEREADER); }
    public void testGoogVP8ImageReader()    { swirlTest(googVP8(),    MODE_IMAGEREADER); }
    public void testGoogVP9ImageReader()    { swirlTest(googVP9(),    MODE_IMAGEREADER); }

    public void testOtherH265ImageReader()  { swirlTest(otherH265(),  MODE_IMAGEREADER); }
    public void testOtherH264ImageReader()  { swirlTest(otherH264(),  MODE_IMAGEREADER); }
    public void testOtherH263ImageReader()  { swirlTest(otherH263(),  MODE_IMAGEREADER); }
    public void testOtherMpeg4ImageReader() { swirlTest(otherMpeg4(), MODE_IMAGEREADER); }
    public void testOtherVP8ImageReader()   { swirlTest(otherVP8(),   MODE_IMAGEREADER); }
    public void testOtherVP9ImageReader()   { swirlTest(otherVP9(),   MODE_IMAGEREADER); }

    /**
     * Test ImageReader with 480x360 non-Google (HW) AVC decoding for the flexible YUV format.
     */
    public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = other(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    /**
     * Test ImageReader with 480x360 Google (SW) AVC decoding for the flexible YUV format.
     */
    public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = goog(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    private void swirlTest(Decoder[] decoders, int mode) {
        decodeTest(decoders, mode, true /* checkSwirl */);
    }

    private void decodeTest(Decoder[] decoders, int mode, boolean checkSwirl) {
        try {
            boolean skipped = true;
            for (Decoder codec : decoders) {
                // videoDecode() returns true if the codec skipped all of its clips.
                if (!codec.videoDecode(mode, checkSwirl)) {
                    skipped = false;
                }
            }
            if (skipped) {
                MediaUtils.skipTest("no decoder supports any of the input files");
            }
        } finally {
            closeImageReader();
        }
    }

    private static class ImageListener implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                mQueue.put(reader.acquireNextImage());
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait, in milliseconds.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }
    }

    /**
     * Decode video frames to image reader.
     */
    private void decodeFramesToImage(
            MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat,
            int width, int height, int imageFormat, int mode, boolean checkSwirl)
            throws InterruptedException {
        ByteBuffer[] decoderInputBuffers;
        ByteBuffer[] decoderOutputBuffers;

        // Configure decoder.
        if (VERBOSE) Log.v(TAG, "stream format: " + mediaFormat);
        if (mode == MODE_IMAGEREADER) {
            createImageReader(width, height, imageFormat, MAX_NUM_IMAGES, mImageListener);
            decoder.configure(mediaFormat, mReaderSurface, null /* crypto */, 0 /* flags */);
        } else {
            assertEquals(mode, MODE_IMAGE);
            decoder.configure(mediaFormat, null /* surface */, null /* crypto */, 0 /* flags */);
        }
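        // In MODE_IMAGEREADER, frames rendered to mReaderSurface arrive asynchronously on the
        // handler thread via ImageListener; in MODE_IMAGE, each frame is read synchronously
        // from the codec with getOutputImage() below.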

        decoder.start();
        decoderInputBuffers = decoder.getInputBuffers();
        decoderOutputBuffers = decoder.getOutputBuffers();
        extractor.selectTrack(0);

        // Start decoding and getting Images; only the first NUM_FRAME_DECODED frames are tested.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int outputFrameCount = 0;
        while (!sawOutputEOS && outputFrameCount < NUM_FRAME_DECODED) {
            if (VERBOSE) Log.v(TAG, "loop:" + outputFrameCount);
            // Feed input frame.
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = decoderInputBuffers[inputBufIndex];
                    int sampleSize =
                        extractor.readSampleData(dstBuf, 0 /* offset */);

                    if (VERBOSE) Log.v(TAG, "queue an input buffer, idx/size: "
                        + inputBufIndex + "/" + sampleSize);

                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        if (VERBOSE) Log.v(TAG, "saw input EOS.");
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            sampleSize,
                            presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Get output frame
            int res = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (VERBOSE) Log.v(TAG, "got a buffer: " + info.size + "/" + res);
            if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.v(TAG, "no output frame available");
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // decoder output buffers changed, need to update the reference.
                if (VERBOSE) Log.v(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned.
                MediaFormat outFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.v(TAG, "decoder output format changed: " + outFormat);
            } else if (res < 0) {
                // Should be a decoding error.
                fail("unexpected result from decoder.dequeueOutputBuffer: " + res);
            } else {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }

                // res >= 0: normal decoding case; validate the decoded frame as an Image.
                // Some decoders output a 0-sized buffer at the end. Ignore those.
                boolean doRender = (info.size != 0);

                if (doRender) {
                    outputFrameCount++;
                    String fileName = DEBUG_FILE_NAME_BASE + MediaUtils.getTestName()
                            + (mode == MODE_IMAGE ? "_image_" : "_reader_")
                            + width + "x" + height + "_" + outputFrameCount + ".yuv";

                    Image image = null;
                    try {
                        if (mode == MODE_IMAGE) {
                            image = decoder.getOutputImage(res);
                        } else {
                            decoder.releaseOutputBuffer(res, doRender);
                            res = -1;
                            // Read image and verify
                            image = mImageListener.getImage(WAIT_FOR_IMAGE_TIMEOUT_MS);
                        }
                        validateImage(image, width, height, imageFormat, fileName);

                        if (checkSwirl) {
                            try {
                                validateSwirl(image);
                            } catch (Throwable e) {
                                dumpFile(fileName, getDataFromImage(image));
                                throw e;
                            }
                        }
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                if (res >= 0) {
                    decoder.releaseOutputBuffer(res, false /* render */);
                }
            }
        }
    }

    /**
     * Validate image based on format and size.
     *
     * @param image The image to be validated.
     * @param width The image width.
     * @param height The image height.
     * @param format The image format.
     * @param filePath The debug dump file path; null to skip dumping to file.
     */
    public static void validateImage(
            Image image, int width, int height, int format, String filePath) {
        assertNotNull("Input image is invalid", image);

        if (VERBOSE) {
            Plane[] imagePlanes = image.getPlanes();
            Log.v(TAG, "Image " + filePath + " Info:");
            Log.v(TAG, "first plane pixelstride " + imagePlanes[0].getPixelStride());
            Log.v(TAG, "first plane rowstride " + imagePlanes[0].getRowStride());
            Log.v(TAG, "Image timestamp:" + image.getTimestamp());
        }

        assertEquals("Format doesn't match", format, image.getFormat());
        assertEquals("Width doesn't match", width, image.getCropRect().width());
        assertEquals("Height doesn't match", height, image.getCropRect().height());

        if (VERBOSE) Log.v(TAG, "validating Image");
        byte[] data = getDataFromImage(image);
        assertTrue("Invalid image data", data != null && data.length > 0);

        validateYuvData(data, width, height, format, image.getTimestamp());

        if (VERBOSE && filePath != null) {
            dumpFile(filePath, data);
        }
    }

    private static void validateSwirl(Image image) {
        Rect crop = image.getCropRect();
        final int NUM_SIDES = 4;
        final int step = 8;      // the width of the layers
        long[][] rawStats = new long[NUM_SIDES][10];
        int[][] colors = new int[][] {
            { 111, 96, 204 }, { 178, 27, 174 }, { 100, 192, 92 }, { 106, 117, 62 }
        };
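        // colors holds the expected average color (one value per YUV channel) for each of the
        // four swirl layers; the per-layer statistics accumulated below are compared against
        // these within COLOR_DELTA_ALLOWANCE.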

        // successively accumulate statistics for each layer of the swirl
        // by using overlapping rectangles, and the observation that
        // layer_i = rectangle_i - rectangle_(i+1)
        int lastLayer = 0;
        int layer = 0;
        boolean lastLayerValid = false;
        for (int pos = 0; ; pos += step) {
            Rect area = new Rect(pos - step, pos, crop.width() / 2, crop.height() + 2 * step - pos);
            if (area.isEmpty()) {
                break;
            }
            area.offset(crop.left, crop.top);
            area.intersect(crop);
            for (int lr = 0; lr < 2; ++lr) {
                long[] oneStat = CodecUtils.getRawStats(image, area);
                if (VERBOSE) Log.v(TAG, "area=" + area + ", layer=" + layer + ", last="
                                    + lastLayer + ": " + Arrays.toString(oneStat));
                for (int i = 0; i < oneStat.length; i++) {
                    rawStats[layer][i] += oneStat[i];
                    if (lastLayerValid) {
                        rawStats[lastLayer][i] -= oneStat[i];
                    }
                }
                if (VERBOSE && lastLayerValid) {
                    Log.v(TAG, "layer-" + lastLayer + ": " + Arrays.toString(rawStats[lastLayer]));
                    Log.v(TAG, Arrays.toString(CodecUtils.Raw2YUVStats(rawStats[lastLayer])));
                }
                // switch to the opposite side
                layer ^= 2;      // NUM_SIDES / 2
                lastLayer ^= 2;  // NUM_SIDES / 2
                area.offset(crop.centerX() - area.left, 2 * (crop.centerY() - area.centerY()));
            }

            lastLayer = layer;
            lastLayerValid = true;
            layer = (layer + 1) % NUM_SIDES;
        }

        for (layer = 0; layer < NUM_SIDES; ++layer) {
            float[] stats = CodecUtils.Raw2YUVStats(rawStats[layer]);
            if (DEBUG) Log.d(TAG, "layer-" + layer + ": " + Arrays.toString(stats));
            if (VERBOSE) Log.v(TAG, Arrays.toString(rawStats[layer]));

            // check layer uniformity
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " is not uniform: "
                        + Arrays.toString(stats),
                        stats[3 + i] < COLOR_STDEV_ALLOWANCE);
            }

            // check layer color
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " mismatches target "
                        + Arrays.toString(colors[layer]) + " vs "
                        + Arrays.toString(Arrays.copyOf(stats, 3)),
                        Math.abs(stats[i] - colors[layer][i]) < COLOR_DELTA_ALLOWANCE);
            }
        }
    }

    private static void validateYuvData(byte[] yuvData, int width, int height, int format,
            long ts) {

        assertTrue("YUV format must be one of YUV_420_888, NV21, or YV12",
                format == ImageFormat.YUV_420_888 ||
                format == ImageFormat.NV21 ||
                format == ImageFormat.YV12);

        if (VERBOSE) Log.v(TAG, "Validating YUV data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("YUV data size doesn't match", expectedSize, yuvData.length);
    }

    private static void checkYuvFormat(int format) {
        if ((format != ImageFormat.YUV_420_888) &&
                (format != ImageFormat.NV21) &&
                (format != ImageFormat.YV12)) {
            fail("Wrong format: " + format);
        }
    }

    /**
     * <p>Check the android image format validity for an image; only the formats below
     * are supported.</p>
     *
     * <p>Valid formats are YUV_420_888/NV21/YV12 for a video decoder.</p>
     */
    private static void checkAndroidImageFormat(Image image) {
        int format = image.getFormat();
        Plane[] planes = image.getPlanes();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
                break;
            default:
                fail("Unsupported Image Format: " + format);
        }
    }

    /**
     * Get a byte array of image data from an Image object.
     * <p>
     * Reads data from all planes of an Image into a contiguous, unpadded,
     * unpacked 1-D linear byte array, so that it can be written to disk or
     * conveniently accessed by software. It supports the YUV_420_888/NV21/YV12
     * input Image formats.
     * </p>
     * <p>
     * For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
     * the Y plane data first, followed by the U (Cb) and V (Cr) planes, if present
     * (xstride = width, ystride = height for both chroma and luma components).
     * </p>
     */
    private static byte[] getDataFromImage(Image image) {
        assertNotNull("Invalid image:", image);
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        int rowStride, pixelStride;
        byte[] data = null;

        // Read image data
        Plane[] planes = image.getPlanes();
        assertTrue("Fail to get image planes", planes != null && planes.length > 0);

        // Check image validity
        checkAndroidImageFormat(image);

        ByteBuffer buffer = null;

        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        if (VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
        for (int i = 0; i < planes.length; i++) {
            int shift = (i == 0) ? 0 : 1;
            buffer = planes[i].getBuffer();
            assertNotNull("Fail to get bytebuffer from plane", buffer);
            rowStride = planes[i].getRowStride();
            pixelStride = planes[i].getPixelStride();
            assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
            if (VERBOSE) {
                Log.v(TAG, "pixelStride " + pixelStride);
                Log.v(TAG, "rowStride " + rowStride);
                Log.v(TAG, "width " + width);
                Log.v(TAG, "height " + height);
            }
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = crop.width() >> shift;
            int h = crop.height() >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
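            // The buffer is now positioned at the first pixel of the crop region for this
            // plane; each row is then read and its w samples repacked tightly into data[].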
            assertTrue("rowStride " + rowStride + " should be >= width " + w, rowStride >= w);
            for (int row = 0; row < h; row++) {
                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
                int length;
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    offset += length;
                } else {
                    // Generic case: should work for any pixelStride but slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from a
                    // DirectByteBuffer, which is very bad for performance.
                    length = (w - 1) * pixelStride + bytesPerPixel;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[offset++] = rowData[col * pixelStride];
                    }
                }
                // Advance buffer the remainder of the row stride
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
            if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
        }
        return data;
    }

    private static void dumpFile(String fileName, byte[] data) {
        assertNotNull("fileName must not be null", fileName);
        assertNotNull("data must not be null", data);

        FileOutputStream outStream;
        try {
            Log.v(TAG, "output will be saved as " + fileName);
            outStream = new FileOutputStream(fileName);
        } catch (IOException ioe) {
            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
        }

        try {
            outStream.write(data);
            outStream.close();
        } catch (IOException ioe) {
            throw new RuntimeException("failed writing data to file " + fileName, ioe);
        }
    }

    private void createImageReader(
            int width, int height, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener) {
        closeImageReader();

        mReader = ImageReader.newInstance(width, height, format, maxNumImages);
        mReaderSurface = mReader.getSurface();
        mReader.setOnImageAvailableListener(listener, mHandler);
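        // onImageAvailable callbacks are delivered on the HandlerThread created in setUp(),
        // so images can be queued while the decoding loop runs on the test thread.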
        if (VERBOSE) {
            Log.v(TAG, String.format("Created ImageReader size (%dx%d), format %d", width, height,
                    format));
        }
    }

    /**
     * Close any pending images, then close the currently active {@link ImageReader} object.
     */
    private void closeImageReader() {
        if (mReader != null) {
            try {
                // Close all possible pending images first.
                Image image = mReader.acquireLatestImage();
                if (image != null) {
                    image.close();
                }
            } finally {
                mReader.close();
                mReader = null;
            }
        }
    }
}