/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources.NotFoundException;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.cts.CodecUtils;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.ParcelFileDescriptor;
import android.platform.test.annotations.AppModeFull;
import android.platform.test.annotations.Presubmit;
import android.platform.test.annotations.RequiresDevice;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;

import androidx.test.filters.FlakyTest;
import androidx.test.filters.SmallTest;

import com.android.compatibility.common.util.MediaUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Basic test for ImageReader APIs.
 * <p>
 * It uses MediaCodec to decode a short video stream and sends the video frames to
 * the surface provided by an ImageReader, then verifies that the images obtained
 * from the ImageReader match the output buffers of the MediaCodec. The exact
 * compression format does not matter for this test; the swirl clips cover H.263,
 * MPEG-4, AVC, HEVC, VP8 and VP9, and both hardware and software decoders are
 * exercised.
 * </p>
 */
@Presubmit
@SmallTest
@RequiresDevice
@AppModeFull(reason = "Instant apps cannot access the SD card")
public class ImageReaderDecoderTest extends AndroidTestCase {
    private static final String TAG = "ImageReaderDecoderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    private static final long DEFAULT_TIMEOUT_US = 10000;
    private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 1000;
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/";
    private static final int NUM_FRAME_DECODED = 100;
    // video decoders only support a single outstanding image with the consumer
    private static final int MAX_NUM_IMAGES = 1;
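    // per-channel tolerances (in YUV units) used by validateSwirl: maximum standard
    // deviation within a layer and maximum delta from the layer's target color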
    private static final float COLOR_STDEV_ALLOWANCE = 5f;
    private static final float COLOR_DELTA_ALLOWANCE = 5f;

    private final static int MODE_IMAGEREADER = 0;
    private final static int MODE_IMAGE       = 1;

    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private ImageReader mReader;
    private Surface mReaderSurface;
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private ImageListener mImageListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mHandlerThread = new HandlerThread(TAG);
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mImageListener = new ImageListener();
    }

    @Override
    protected void tearDown() throws Exception {
        mHandlerThread.quitSafely();
        mHandler = null;
    }

    static class MediaAsset {
        public MediaAsset(String resource, int width, int height) {
            mResource = resource;
            mWidth = width;
            mHeight = height;
        }

        public int getWidth() {
            return mWidth;
        }

        public int getHeight() {
            return mHeight;
        }

        public String getResource() {
            return mResource;
        }

        private final String mResource;
        private final int mWidth;
        private final int mHeight;
    }

    static class MediaAssets {
        public MediaAssets(String mime, MediaAsset... assets) {
            mMime = mime;
            mAssets = assets;
        }

        public String getMime() {
            return mMime;
        }

        public MediaAsset[] getAssets() {
            return mAssets;
        }

        private final String mMime;
        private final MediaAsset[] mAssets;
    }

    static final String mInpPrefix = WorkDir.getMediaDirString();
    protected AssetFileDescriptor getAssetFileDescriptorFor(final String res)
            throws FileNotFoundException {
        Preconditions.assertTestFileExists(mInpPrefix + res);
        File inpFile = new File(mInpPrefix + res);
        ParcelFileDescriptor parcelFD =
                ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY);
        return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize());
    }

    private static MediaAssets H263_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_H263,
            new MediaAsset("swirl_176x144_h263.3gp", 176, 144),
            new MediaAsset("swirl_352x288_h263.3gp", 352, 288),
            new MediaAsset("swirl_128x96_h263.3gp", 128, 96));

    private static MediaAssets MPEG4_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_MPEG4,
            new MediaAsset("swirl_128x128_mpeg4.mp4", 128, 128),
            new MediaAsset("swirl_144x136_mpeg4.mp4", 144, 136),
            new MediaAsset("swirl_136x144_mpeg4.mp4", 136, 144),
            new MediaAsset("swirl_132x130_mpeg4.mp4", 132, 130),
            new MediaAsset("swirl_130x132_mpeg4.mp4", 130, 132));

    private static MediaAssets H264_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AVC,
            new MediaAsset("swirl_128x128_h264.mp4", 128, 128),
            new MediaAsset("swirl_144x136_h264.mp4", 144, 136),
            new MediaAsset("swirl_136x144_h264.mp4", 136, 144),
            new MediaAsset("swirl_132x130_h264.mp4", 132, 130),
            new MediaAsset("swirl_130x132_h264.mp4", 130, 132));

    private static MediaAssets H265_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_HEVC,
            new MediaAsset("swirl_128x128_h265.mp4", 128, 128),
            new MediaAsset("swirl_144x136_h265.mp4", 144, 136),
            new MediaAsset("swirl_136x144_h265.mp4", 136, 144),
            new MediaAsset("swirl_132x130_h265.mp4", 132, 130),
            new MediaAsset("swirl_130x132_h265.mp4", 130, 132));

    private static MediaAssets VP8_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP8,
            new MediaAsset("swirl_128x128_vp8.webm", 128, 128),
            new MediaAsset("swirl_144x136_vp8.webm", 144, 136),
            new MediaAsset("swirl_136x144_vp8.webm", 136, 144),
            new MediaAsset("swirl_132x130_vp8.webm", 132, 130),
            new MediaAsset("swirl_130x132_vp8.webm", 130, 132));

    private static MediaAssets VP9_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP9,
            new MediaAsset("swirl_128x128_vp9.webm", 128, 128),
            new MediaAsset("swirl_144x136_vp9.webm", 144, 136),
            new MediaAsset("swirl_136x144_vp9.webm", 136, 144),
            new MediaAsset("swirl_132x130_vp9.webm", 132, 130),
            new MediaAsset("swirl_130x132_vp9.webm", 130, 132));

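    // frame rate used when checking codec size/rate support for the swirl clips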
    static final float SWIRL_FPS = 12.f;

    class Decoder {
        final private String mName;
        final private String mMime;
        final private VideoCapabilities mCaps;
        final private ArrayList<MediaAsset> mAssets;

        boolean isFlexibleFormatSupported(CodecCapabilities caps) {
            for (int c : caps.colorFormats) {
                if (c == COLOR_FormatYUV420Flexible) {
                    return true;
                }
            }
            return false;
        }

        Decoder(String name, MediaAssets assets, CodecCapabilities caps) {
            mName = name;
            mMime = assets.getMime();
            mCaps = caps.getVideoCapabilities();
            mAssets = new ArrayList<MediaAsset>();

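            // keep only the assets whose size and frame rate this codec supports and
            // that it can output in flexible YUV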
            for (MediaAsset asset : assets.getAssets()) {
                if (mCaps.areSizeAndRateSupported(asset.getWidth(), asset.getHeight(), SWIRL_FPS)
                        && isFlexibleFormatSupported(caps)) {
                    mAssets.add(asset);
                }
            }
        }

        public boolean videoDecode(int mode, boolean checkSwirl) {
            boolean skipped = true;
            for (MediaAsset asset: mAssets) {
                // TODO: loop over all supported image formats
                int imageFormat = ImageFormat.YUV_420_888;
                int colorFormat = COLOR_FormatYUV420Flexible;
                videoDecode(asset, imageFormat, colorFormat, mode, checkSwirl);
                skipped = false;
            }
            return skipped;
        }

        private void videoDecode(
                MediaAsset asset, int imageFormat, int colorFormat, int mode, boolean checkSwirl) {
            String video = asset.getResource();
            int width = asset.getWidth();
            int height = asset.getHeight();

            if (DEBUG) Log.d(TAG, "videoDecode " + mName + " " + width + "x" + height);

            MediaCodec decoder = null;
            AssetFileDescriptor vidFD = null;

            MediaExtractor extractor = null;
            File tmpFile = null;
            InputStream is = null;
            FileOutputStream os = null;
            MediaFormat mediaFormat = null;
            try {
                extractor = new MediaExtractor();

                try {
                    vidFD = getAssetFileDescriptorFor(video);
                    extractor.setDataSource(
                            vidFD.getFileDescriptor(), vidFD.getStartOffset(), vidFD.getLength());
                } catch (NotFoundException e) {
                    // resource is compressed, uncompress locally
                    String tmpName = "tempStream";
                    tmpFile = File.createTempFile(tmpName, null, mContext.getCacheDir());
                    is = new FileInputStream(mInpPrefix + video);
                    os = new FileOutputStream(tmpFile);
                    byte[] buf = new byte[1024];
                    int len;
                    while ((len = is.read(buf, 0, buf.length)) > 0) {
                        os.write(buf, 0, len);
                    }
                    os.close();
                    is.close();

                    extractor.setDataSource(tmpFile.getAbsolutePath());
                }

                mediaFormat = extractor.getTrackFormat(0);
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);

                // Create decoder
                decoder = MediaCodec.createByCodecName(mName);
                assertNotNull("couldn't create decoder " + mName, decoder);

                decodeFramesToImage(
                        decoder, extractor, mediaFormat,
                        width, height, imageFormat, mode, checkSwirl);

                decoder.stop();
                if (vidFD != null) {
                    vidFD.close();
                }
            } catch (Throwable e) {
                throw new RuntimeException(
                        "while " + mName + " decoding " + video + ": " + mediaFormat, e);
            } finally {
                if (decoder != null) {
                    decoder.release();
                }
                if (extractor != null) {
                    extractor.release();
                }
                if (tmpFile != null) {
                    tmpFile.delete();
                }
            }
        }
    }

    private Decoder[] decoders(MediaAssets assets, boolean goog) {
        String mime = assets.getMime();
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        ArrayList<Decoder> result = new ArrayList<Decoder>();

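        // goog == true keeps the platform (non-vendor) codecs; goog == false keeps vendor codecs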
        for (MediaCodecInfo info : mcl.getCodecInfos()) {
            if (info.isEncoder() || info.isAlias() || info.isVendor() == goog) {
                continue;
            }
            CodecCapabilities caps = null;
            try {
                caps = info.getCapabilitiesForType(mime);
            } catch (IllegalArgumentException e) { // mime is not supported
                continue;
            }
            assertNotNull(info.getName() + " capabilities for " + mime + " returned null", caps);
            result.add(new Decoder(info.getName(), assets, caps));
        }
        return result.toArray(new Decoder[result.size()]);
    }

    private Decoder[] goog(MediaAssets assets) {
        return decoders(assets, true /* goog */);
    }

    private Decoder[] other(MediaAssets assets) {
        return decoders(assets, false /* goog */);
    }

    private Decoder[] googH265()  { return goog(H265_ASSETS); }
    private Decoder[] googH264()  { return goog(H264_ASSETS); }
    private Decoder[] googH263()  { return goog(H263_ASSETS); }
    private Decoder[] googMpeg4() { return goog(MPEG4_ASSETS); }
    private Decoder[] googVP8()   { return goog(VP8_ASSETS); }
    private Decoder[] googVP9()   { return goog(VP9_ASSETS); }

    private Decoder[] otherH265()  { return other(H265_ASSETS); }
    private Decoder[] otherH264()  { return other(H264_ASSETS); }
    private Decoder[] otherH263()  { return other(H263_ASSETS); }
    private Decoder[] otherMpeg4() { return other(MPEG4_ASSETS); }
    private Decoder[] otherVP8()   { return other(VP8_ASSETS); }
    private Decoder[] otherVP9()   { return other(VP9_ASSETS); }

    public void testGoogH265Image()   { swirlTest(googH265(),   MODE_IMAGE); }
    public void testGoogH264Image()   { swirlTest(googH264(),   MODE_IMAGE); }
    public void testGoogH263Image()   { swirlTest(googH263(),   MODE_IMAGE); }
    public void testGoogMpeg4Image()  { swirlTest(googMpeg4(),  MODE_IMAGE); }
    public void testGoogVP8Image()    { swirlTest(googVP8(),    MODE_IMAGE); }
    public void testGoogVP9Image()    { swirlTest(googVP9(),    MODE_IMAGE); }

    public void testOtherH265Image()  { swirlTest(otherH265(),  MODE_IMAGE); }
    public void testOtherH264Image()  { swirlTest(otherH264(),  MODE_IMAGE); }
    public void testOtherH263Image()  { swirlTest(otherH263(),  MODE_IMAGE); }
    public void testOtherMpeg4Image() { swirlTest(otherMpeg4(), MODE_IMAGE); }
    public void testOtherVP8Image()   { swirlTest(otherVP8(),   MODE_IMAGE); }
    public void testOtherVP9Image()   { swirlTest(otherVP9(),   MODE_IMAGE); }

    public void testGoogH265ImageReader()   { swirlTest(googH265(),   MODE_IMAGEREADER); }
    public void testGoogH264ImageReader()   { swirlTest(googH264(),   MODE_IMAGEREADER); }
    public void testGoogH263ImageReader()   { swirlTest(googH263(),   MODE_IMAGEREADER); }
    public void testGoogMpeg4ImageReader()  { swirlTest(googMpeg4(),  MODE_IMAGEREADER); }
    public void testGoogVP8ImageReader()    { swirlTest(googVP8(),    MODE_IMAGEREADER); }
    public void testGoogVP9ImageReader()    { swirlTest(googVP9(),    MODE_IMAGEREADER); }

    // TODO: b/186001256
    @FlakyTest
    public void testOtherH265ImageReader()  { swirlTest(otherH265(),  MODE_IMAGEREADER); }
    @FlakyTest
    public void testOtherH264ImageReader()  { swirlTest(otherH264(),  MODE_IMAGEREADER); }
    public void testOtherH263ImageReader()  { swirlTest(otherH263(),  MODE_IMAGEREADER); }
    public void testOtherMpeg4ImageReader() { swirlTest(otherMpeg4(), MODE_IMAGEREADER); }
    @FlakyTest
    public void testOtherVP8ImageReader()   { swirlTest(otherVP8(),   MODE_IMAGEREADER); }
    @FlakyTest
    public void testOtherVP9ImageReader()   { swirlTest(otherVP9(),   MODE_IMAGEREADER); }

    /**
     * Test ImageReader with 480x360 non-google AVC decoding for flexible yuv format
     */
    public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = other(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    /**
     * Test ImageReader with 480x360 google (SW) AVC decoding for flexible yuv format
     */
    public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
        Decoder[] decoders = goog(new MediaAssets(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                new MediaAsset(
                        "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                        480 /* width */, 360 /* height */)));

        decodeTest(decoders, MODE_IMAGEREADER, false /* checkSwirl */);
    }

    private void swirlTest(Decoder[] decoders, int mode) {
        decodeTest(decoders, mode, true /* checkSwirl */);
    }

    private void decodeTest(Decoder[] decoders, int mode, boolean checkSwirl) {
        try {
            boolean skipped = true;
            for (Decoder codec : decoders) {
                // videoDecode() returns true if the codec skipped all of its assets
                if (!codec.videoDecode(mode, checkSwirl)) {
                    skipped = false;
                }
            }
            if (skipped) {
                MediaUtils.skipTest("decoder does not support any of the input files");
            }
        } finally {
            closeImageReader();
        }
    }

    private static class ImageListener implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                mQueue.put(reader.acquireNextImage());
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }
    }

    /**
     * Decode video frames to image reader.
     */
    private void decodeFramesToImage(
            MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat,
            int width, int height, int imageFormat, int mode, boolean checkSwirl)
            throws InterruptedException {
        ByteBuffer[] decoderInputBuffers;
        ByteBuffer[] decoderOutputBuffers;

        // Configure decoder.
        if (VERBOSE) Log.v(TAG, "stream format: " + mediaFormat);
        if (mode == MODE_IMAGEREADER) {
            createImageReader(width, height, imageFormat, MAX_NUM_IMAGES, mImageListener);
            decoder.configure(mediaFormat, mReaderSurface, null /* crypto */, 0 /* flags */);
        } else {
            assertEquals(mode, MODE_IMAGE);
            decoder.configure(mediaFormat, null /* surface */, null /* crypto */, 0 /* flags */);
        }

        decoder.start();
        decoderInputBuffers = decoder.getInputBuffers();
        decoderOutputBuffers = decoder.getOutputBuffers();
        extractor.selectTrack(0);

        // Start decoding and get Image, only test the first NUM_FRAME_DECODED frames.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int outputFrameCount = 0;
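        // Each pass feeds at most one input sample and drains at most one output buffer;
        // stop at output EOS or after NUM_FRAME_DECODED rendered frames.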
        while (!sawOutputEOS && outputFrameCount < NUM_FRAME_DECODED) {
            if (VERBOSE) Log.v(TAG, "loop:" + outputFrameCount);
            // Feed input frame.
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = decoderInputBuffers[inputBufIndex];
                    int sampleSize =
                        extractor.readSampleData(dstBuf, 0 /* offset */);

                    if (VERBOSE) Log.v(TAG, "queue an input buffer, idx/size: "
                        + inputBufIndex + "/" + sampleSize);

                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        if (VERBOSE) Log.v(TAG, "saw input EOS.");
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            sampleSize,
                            presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Get output frame
            int res = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (VERBOSE) Log.v(TAG, "got a buffer: " + info.size + "/" + res);
            if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.v(TAG, "no output frame available");
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // decoder output buffers changed, need update.
                if (VERBOSE) Log.v(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned.
                MediaFormat outFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.v(TAG, "decoder output format changed: " + outFormat);
            } else if (res < 0) {
                // Should be decoding error.
                fail("unexpected result from decoder.dequeueOutputBuffer: " + res);
            } else {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }

                // res >= 0: normal decoding case; use the output buffer as the reference
                // to validate the ImageReader output.
                // Some decoders output a 0-sized buffer at the end. Ignore those.
                boolean doRender = (info.size != 0);

                if (doRender) {
                    outputFrameCount++;
                    String fileName = DEBUG_FILE_NAME_BASE + MediaUtils.getTestName()
                            + (mode == MODE_IMAGE ? "_image_" : "_reader_")
                            + width + "x" + height + "_" + outputFrameCount + ".yuv";

                    Image image = null;
                    try {
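                        // MODE_IMAGE reads the frame directly from the codec via
                        // getOutputImage(); MODE_IMAGEREADER renders it to the ImageReader
                        // surface and acquires it from the listener.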
                        if (mode == MODE_IMAGE) {
                            image = decoder.getOutputImage(res);
                        } else {
                            decoder.releaseOutputBuffer(res, doRender);
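                            // the buffer now belongs to the ImageReader surface; clear res so
                            // it is not released a second time below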
                            res = -1;
                            // Read image and verify
                            image = mImageListener.getImage(WAIT_FOR_IMAGE_TIMEOUT_MS);
                        }
                        validateImage(image, width, height, imageFormat, fileName);

                        if (checkSwirl) {
                            try {
                                validateSwirl(image);
                            } catch (Throwable e) {
                                dumpFile(fileName, getDataFromImage(image));
                                throw e;
                            }
                        }
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                if (res >= 0) {
                    decoder.releaseOutputBuffer(res, false /* render */);
                }
            }
        }
    }

    /**
     * Validate image based on format and size.
     *
     * @param image The image to be validated.
     * @param width The image width.
     * @param height The image height.
     * @param format The image format.
     * @param filePath The debug dump file path; null if no dump to file is desired.
     */
    public static void validateImage(
            Image image, int width, int height, int format, String filePath) {
        if (VERBOSE) {
            Plane[] imagePlanes = image.getPlanes();
            Log.v(TAG, "Image " + filePath + " Info:");
            Log.v(TAG, "first plane pixelstride " + imagePlanes[0].getPixelStride());
            Log.v(TAG, "first plane rowstride " + imagePlanes[0].getRowStride());
            Log.v(TAG, "Image timestamp:" + image.getTimestamp());
        }

        assertNotNull("Input image is invalid", image);
        assertEquals("Format doesn't match", format, image.getFormat());
        assertEquals("Width doesn't match", width, image.getCropRect().width());
        assertEquals("Height doesn't match", height, image.getCropRect().height());

        if (VERBOSE) Log.v(TAG, "validating Image");
        byte[] data = getDataFromImage(image);
        assertTrue("Invalid image data", data != null && data.length > 0);

        validateYuvData(data, width, height, format, image.getTimestamp());

        if (VERBOSE && filePath != null) {
            dumpFile(filePath, data);
        }
    }

    private static void validateSwirl(Image image) {
        Rect crop = image.getCropRect();
        final int NUM_SIDES = 4;
        final int step = 8;      // the width of the layers
        long[][] rawStats = new long[NUM_SIDES][10];
        int[][] colors = new int[][] {
            { 111, 96, 204 }, { 178, 27, 174 }, { 100, 192, 92 }, { 106, 117, 62 }
        };

        // successively accumulate statistics for each layer of the swirl
        // by using overlapping rectangles, and the observation that
        // layer_i = rectangle_i - rectangle_(i+1)
        int lastLayer = 0;
        int layer = 0;
        boolean lastLayerValid = false;
        for (int pos = 0; ; pos += step) {
            Rect area = new Rect(pos - step, pos, crop.width() / 2, crop.height() + 2 * step - pos);
            if (area.isEmpty()) {
                break;
            }
            area.offset(crop.left, crop.top);
            area.intersect(crop);
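            // sample one half of the frame, then mirror the area about the center on the
            // second pass to sample the opposite side of the swirl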
            for (int lr = 0; lr < 2; ++lr) {
                long[] oneStat = CodecUtils.getRawStats(image, area);
                if (VERBOSE) Log.v(TAG, "area=" + area + ", layer=" + layer + ", last="
                                    + lastLayer + ": " + Arrays.toString(oneStat));
                for (int i = 0; i < oneStat.length; i++) {
                    rawStats[layer][i] += oneStat[i];
                    if (lastLayerValid) {
                        rawStats[lastLayer][i] -= oneStat[i];
                    }
                }
                if (VERBOSE && lastLayerValid) {
                    Log.v(TAG, "layer-" + lastLayer + ": " + Arrays.toString(rawStats[lastLayer]));
                    Log.v(TAG, Arrays.toString(CodecUtils.Raw2YUVStats(rawStats[lastLayer])));
                }
                // switch to the opposite side
                layer ^= 2;      // NUM_SIDES / 2
                lastLayer ^= 2;  // NUM_SIDES / 2
                area.offset(crop.centerX() - area.left, 2 * (crop.centerY() - area.centerY()));
            }

            lastLayer = layer;
            lastLayerValid = true;
            layer = (layer + 1) % NUM_SIDES;
        }

        for (layer = 0; layer < NUM_SIDES; ++layer) {
            float[] stats = CodecUtils.Raw2YUVStats(rawStats[layer]);
            if (DEBUG) Log.d(TAG, "layer-" + layer + ": " + Arrays.toString(stats));
            if (VERBOSE) Log.v(TAG, Arrays.toString(rawStats[layer]));

            // check layer uniformity
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " is not uniform: "
                        + Arrays.toString(stats),
                        stats[3 + i] < COLOR_STDEV_ALLOWANCE);
            }

            // check layer color
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " mismatches target "
                        + Arrays.toString(colors[layer]) + " vs "
                        + Arrays.toString(Arrays.copyOf(stats, 3)),
                        Math.abs(stats[i] - colors[layer][i]) < COLOR_DELTA_ALLOWANCE);
            }
        }
    }

    private static void validateYuvData(byte[] yuvData, int width, int height, int format,
            long ts) {

        assertTrue("YUV format must be one of YUV_420_888, NV21, or YV12",
                format == ImageFormat.YUV_420_888 ||
                format == ImageFormat.NV21 ||
                format == ImageFormat.YV12);

        if (VERBOSE) Log.v(TAG, "Validating YUV data");
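        // the 4:2:0 formats accepted above report 12 bits per pixel, so the tightly
        // packed size works out to width * height * 3 / 2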
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);
    }

    private static void checkYuvFormat(int format) {
        if ((format != ImageFormat.YUV_420_888) &&
                (format != ImageFormat.NV21) &&
                (format != ImageFormat.YV12)) {
            fail("Wrong formats: " + format);
        }
    }

    /**
     * <p>Check the Android image format validity for an image; only the formats below
     * are supported.</p>
     *
     * <p>Valid formats for a video decoder are YUV_420_888, NV21, and YV12.</p>
     */
    private static void checkAndroidImageFormat(Image image) {
        int format = image.getFormat();
        Plane[] planes = image.getPlanes();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
                break;
            default:
                fail("Unsupported Image Format: " + format);
        }
    }

    /**
     * Get a byte array image data from an Image object.
     * <p>
     * Read data from all planes of an Image into a contiguous, unpadded,
     * unpacked 1-D linear byte array, so that it can be written to disk or
     * conveniently accessed by software. It supports the YUV_420_888/NV21/YV12
     * input Image formats.
     * </p>
     * <p>
     * For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
     * the Y plane data first, followed by the U (Cb) and V (Cr) planes if present
     * (xstride = width, ystride = height for both chroma and luma components).
     * </p>
     */
    private static byte[] getDataFromImage(Image image) {
        assertNotNull("Invalid image:", image);
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        int rowStride, pixelStride;
        byte[] data = null;

        // Read image data
        Plane[] planes = image.getPlanes();
        assertTrue("Fail to get image planes", planes != null && planes.length > 0);

        // Check image validity
        checkAndroidImageFormat(image);

        ByteBuffer buffer = null;

        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        if (VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
        for (int i = 0; i < planes.length; i++) {
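            // plane 0 is full-resolution luma; the chroma planes of the supported 4:2:0
            // formats are subsampled by 2 both horizontally and vertically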
            int shift = (i == 0) ? 0 : 1;
            buffer = planes[i].getBuffer();
            assertNotNull("Fail to get bytebuffer from plane", buffer);
            rowStride = planes[i].getRowStride();
            pixelStride = planes[i].getPixelStride();
            assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
            if (VERBOSE) {
                Log.v(TAG, "pixelStride " + pixelStride);
                Log.v(TAG, "rowStride " + rowStride);
                Log.v(TAG, "width " + width);
                Log.v(TAG, "height " + height);
            }
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = crop.width() >> shift;
            int h = crop.height() >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            assertTrue("rowStride " + rowStride + " should be >= width " + w, rowStride >= w);
            for (int row = 0; row < h; row++) {
                int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
                int length;
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    offset += length;
                } else {
                    // Generic case: should work for any pixelStride but slower.
                    // Use intermediate buffer to avoid read byte-by-byte from
                    // DirectByteBuffer, which is very bad for performance
                    length = (w - 1) * pixelStride + bytesPerPixel;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[offset++] = rowData[col * pixelStride];
                    }
                }
                // Advance buffer the remainder of the row stride
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
            if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
        }
        return data;
    }

    private static void dumpFile(String fileName, byte[] data) {
        assertNotNull("fileName must not be null", fileName);
        assertNotNull("data must not be null", data);

        FileOutputStream outStream;
        try {
            Log.v(TAG, "output will be saved as " + fileName);
            outStream = new FileOutputStream(fileName);
        } catch (IOException ioe) {
            throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
        }

        try {
            outStream.write(data);
            outStream.close();
        } catch (IOException ioe) {
            throw new RuntimeException("failed writing data to file " + fileName, ioe);
        }
    }

    private void createImageReader(
            int width, int height, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener) {
        closeImageReader();

        mReader = ImageReader.newInstance(width, height, format, maxNumImages);
        mReaderSurface = mReader.getSurface();
        mReader.setOnImageAvailableListener(listener, mHandler);
        if (VERBOSE) {
            Log.v(TAG, String.format("Created ImageReader size (%dx%d), format %d", width, height,
                    format));
        }
    }

    /**
     * Close any pending images, then close the currently active {@link ImageReader}.
     */
    private void closeImageReader() {
        if (mReader != null) {
            try {
                // Close all possible pending images first.
                Image image = mReader.acquireLatestImage();
                if (image != null) {
                    image.close();
                }
            } finally {
                mReader.close();
                mReader = null;
            }
        }
    }
}