1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.media.decoder.cts;
18 
19 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
20 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
21 
22 import static org.junit.Assert.assertEquals;
23 import static org.junit.Assert.assertNotNull;
24 import static org.junit.Assert.assertNull;
25 import static org.junit.Assert.assertTrue;
26 import static org.junit.Assert.fail;
27 import static org.junit.Assume.assumeTrue;
28 
29 import android.graphics.ImageFormat;
30 import android.graphics.Rect;
31 import android.media.Image;
32 import android.media.Image.Plane;
33 import android.media.ImageReader;
34 import android.media.MediaCodec;
35 import android.media.MediaCodecInfo;
36 import android.media.MediaCodecInfo.CodecProfileLevel;
37 import android.media.MediaCodecInfo.CodecCapabilities;
38 import android.media.MediaCodecInfo.VideoCapabilities;
39 import android.media.MediaExtractor;
40 import android.media.MediaFormat;
41 import android.media.cts.CodecUtils;
42 import android.media.cts.TestArgs;
43 import android.os.Handler;
44 import android.os.HandlerThread;
45 import android.platform.test.annotations.AppModeFull;
46 import android.platform.test.annotations.Presubmit;
47 import android.platform.test.annotations.RequiresDevice;
48 import android.util.Log;
49 import android.util.Pair;
50 import android.view.Surface;
51 
52 import androidx.test.filters.SmallTest;
53 
54 import com.android.compatibility.common.util.MediaUtils;
55 import com.android.compatibility.common.util.Preconditions;
56 
57 import org.junit.After;
58 import org.junit.Before;
59 import org.junit.Test;
60 import org.junit.runner.RunWith;
61 import org.junit.runners.Parameterized;
62 
63 import java.io.FileOutputStream;
64 import java.io.IOException;
65 import java.nio.ByteBuffer;
66 import java.util.ArrayList;
67 import java.util.Arrays;
68 import java.util.Collection;
69 import java.util.List;
70 import java.util.concurrent.LinkedBlockingQueue;
71 import java.util.concurrent.TimeUnit;
72 
73 /**
74  * Basic test for ImageReader APIs.
75  * <p>
76  * It uses MediaCodec to decode a short video stream, send the video frames to
77  * the surface provided by ImageReader. Then compare if output buffers of the
78  * ImageReader matches the output buffers of the MediaCodec. The video format
79  * used here is AVC although the compression format doesn't matter for this
 * test. For the decoder test, both hw and sw decoders are tested.
81  * </p>
82  */
83 @Presubmit
84 @SmallTest
85 @RequiresDevice
86 @AppModeFull(reason = "Instant apps cannot access the SD card")
87 @RunWith(Parameterized.class)
88 public class ImageReaderDecoderTest {
    private static final String TAG = "ImageReaderDecoderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    // Timeout for dequeueing decoder input/output buffers, in microseconds.
    private static final long DEFAULT_TIMEOUT_US = 10000;
    // Timeout for waiting on an image from the ImageReader, in milliseconds.
    private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 1000;
    // Directory where YUV dumps are written when swirl validation fails.
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/";
    // Only the first NUM_FRAME_DECODED output frames of each clip are checked.
    private static final int NUM_FRAME_DECODED = 100;
    // video decoders only support a single outstanding image with the consumer
    private static final int MAX_NUM_IMAGES = 1;
    // Allowed per-layer color standard deviation / mean delta for swirl checks.
    private static final float COLOR_STDEV_ALLOWANCE = 5f;
    private static final float COLOR_DELTA_ALLOWANCE = 5f;

    // Test modes: render to an ImageReader surface, or read images directly
    // from the decoder's output buffers via getOutputImage().
    private final static int MODE_IMAGEREADER = 0;
    private final static int MODE_IMAGE       = 1;

    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private ImageReader mReader;
    private Surface mReaderSurface;
    // Background thread/handler that services ImageReader callbacks.
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private ImageListener mImageListener;

    // Per-instance parameters supplied by the Parameterized runner through
    // the constructor (see input()).
    public String mMime;
    public String mCodecName;
    public MediaAsset mMediaAsset;
    public int mMode;
    public String mTestId;
    MediaCodec mDecoder = null;
    MediaExtractor mExtractor = null;
118 
    /**
     * Constructs one parameterized test case.
     *
     * @param mime MIME type of the clip being decoded.
     * @param codecName name of the decoder under test.
     * @param asset the clip (resource, dimensions, bit depth) to decode.
     * @param mode MODE_IMAGEREADER or MODE_IMAGE.
     * @param testId human-readable id used in test names and dump file names.
     */
    public ImageReaderDecoderTest(String mime, String codecName, MediaAsset asset, int mode,
                                  String testId) {
        mMime = mime;
        mCodecName = codecName;
        mMediaAsset = asset;
        mMode = mode;
        mTestId = testId;
    }
127 
128     @Parameterized.Parameters(name = "{index}_{0}_{1}_{4}")
input()129     public static Collection<Object[]> input() {
130         final List<Object[]> argsList = new ArrayList<>();
131         for (MediaAssets assets : ASSETS) {
132             String mime = assets.getMime();
133             if (TestArgs.shouldSkipMediaType(mime)) {
134                 continue;
135             }
136             String[] decoders = MediaUtils.getDecoderNamesForMime(mime);
137             for (String decoder: decoders) {
138                 if (TestArgs.shouldSkipCodec(decoder)) {
139                     continue;
140                 }
141                 for (MediaAsset asset : assets.getAssets()) {
142                     String id = asset.getWidth() + "x" + asset.getHeight();
143                     id += "_" + asset.getBitDepth() + "bit";
144                     if (asset.getIsSwirl()) {
145                         id += "_swirl";
146                         argsList.add(new Object[]{mime, decoder, asset, MODE_IMAGE, id + "_image"});
147                     }
148                     argsList.add(new Object[]{mime, decoder, asset, MODE_IMAGEREADER,
149                             id + "_imagereader"});
150                 }
151             }
152         }
153         return argsList;
154     }
155 
    @Before
    public void setUp() throws Exception {
        // Background thread/handler on which ImageReader delivers callbacks.
        mHandlerThread = new HandlerThread(TAG);
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mImageListener = new ImageListener();

        // Create the decoder under test and an extractor for the input clip.
        mDecoder = MediaCodec.createByCodecName(mCodecName);
        mExtractor = new MediaExtractor();
    }
166 
    @After
    public void tearDown() throws Exception {
        // Release per-test resources; the extractor/decoder may be null if
        // setUp failed before creating them.
        closeImageReader();
        mHandlerThread.quitSafely();
        mHandler = null;
        if (mExtractor != null) {
            mExtractor.release();
        }
        if (mDecoder != null) {
            mDecoder.release();
        }
    }
179 
180     static class MediaAsset {
MediaAsset(String resource, int width, int height, boolean isSwirl, int bitDepth)181         public MediaAsset(String resource, int width, int height, boolean isSwirl,
182                           int bitDepth) {
183             mResource = resource;
184             mWidth = width;
185             mHeight = height;
186             mIsSwirl = isSwirl;
187             mBitDepth = bitDepth;
188         }
189 
MediaAsset(String resource, int width, int height)190         public MediaAsset(String resource, int width, int height) {
191             this(resource, width, height, true, 8);
192         }
193 
MediaAsset(String resource, int width, int height, boolean isSwirl)194         public MediaAsset(String resource, int width, int height, boolean isSwirl) {
195             this(resource, width, height, isSwirl, 8);
196         }
197 
MediaAsset(String resource, int width, int height, int bitDepth)198         public MediaAsset(String resource, int width, int height, int bitDepth) {
199             this(resource, width, height, true, bitDepth);
200         }
201 
getWidth()202         public int getWidth() {
203             return mWidth;
204         }
205 
getHeight()206         public int getHeight() {
207             return mHeight;
208         }
209 
getIsSwirl()210         public boolean getIsSwirl() {
211             return mIsSwirl;
212         }
213 
getBitDepth()214         public int getBitDepth() {
215             return mBitDepth;
216         }
217 
getResource()218         public String getResource() {
219             return mResource;
220         }
221 
222         private final String mResource;
223         private final int mWidth;
224         private final int mHeight;
225         private final boolean mIsSwirl;
226         private final int mBitDepth;
227     }
228 
229     static class MediaAssets {
MediaAssets(String mime, MediaAsset... assets)230         public MediaAssets(String mime, MediaAsset... assets) {
231             mMime = mime;
232             mAssets = assets;
233         }
234 
getMime()235         public String getMime() {
236             return mMime;
237         }
238 
getAssets()239         public MediaAsset[] getAssets() {
240             return mAssets;
241         }
242 
243         private final String mMime;
244         private final MediaAsset[] mAssets;
245     }
246 
    // Root directory holding the test media files.
    static final String mInpPrefix = WorkDir.getMediaDirString();

    // Each group below lists the clips for one MIME type; file names encode
    // resolution and, where present, a 10-bit depth suffix.
    private static MediaAssets H263_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_H263,
            new MediaAsset("swirl_176x144_h263.3gp", 176, 144),
            new MediaAsset("swirl_352x288_h263.3gp", 352, 288),
            new MediaAsset("swirl_128x96_h263.3gp", 128, 96));

    private static MediaAssets MPEG4_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_MPEG4,
            new MediaAsset("swirl_128x128_mpeg4.mp4", 128, 128),
            new MediaAsset("swirl_144x136_mpeg4.mp4", 144, 136),
            new MediaAsset("swirl_136x144_mpeg4.mp4", 136, 144),
            new MediaAsset("swirl_132x130_mpeg4.mp4", 132, 130),
            new MediaAsset("swirl_130x132_mpeg4.mp4", 130, 132));

    private static MediaAssets H264_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AVC,
            new MediaAsset("swirl_128x128_h264.mp4", 128, 128),
            new MediaAsset("swirl_144x136_h264.mp4", 144, 136),
            new MediaAsset("swirl_136x144_h264.mp4", 136, 144),
            new MediaAsset("swirl_132x130_h264.mp4", 132, 130),
            new MediaAsset("swirl_130x132_h264.mp4", 130, 132),
            new MediaAsset("swirl_128x128_h264_10bit.mp4", 128, 128, 10),
            new MediaAsset("swirl_144x136_h264_10bit.mp4", 144, 136, 10),
            new MediaAsset("swirl_136x144_h264_10bit.mp4", 136, 144, 10),
            new MediaAsset("swirl_132x130_h264_10bit.mp4", 132, 130, 10),
            new MediaAsset("swirl_130x132_h264_10bit.mp4", 130, 132, 10),
            // Non-swirl clip: decoded and size-validated but skips swirl color checks.
            new MediaAsset("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                    480, 360, false));

    private static MediaAssets H265_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_HEVC,
            new MediaAsset("swirl_128x128_h265.mp4", 128, 128),
            new MediaAsset("swirl_144x136_h265.mp4", 144, 136),
            new MediaAsset("swirl_136x144_h265.mp4", 136, 144),
            new MediaAsset("swirl_132x130_h265.mp4", 132, 130),
            new MediaAsset("swirl_130x132_h265.mp4", 130, 132),
            new MediaAsset("swirl_128x128_h265_10bit.mp4", 128, 128, 10),
            new MediaAsset("swirl_144x136_h265_10bit.mp4", 144, 136, 10),
            new MediaAsset("swirl_136x144_h265_10bit.mp4", 136, 144, 10),
            new MediaAsset("swirl_132x130_h265_10bit.mp4", 132, 130, 10),
            new MediaAsset("swirl_130x132_h265_10bit.mp4", 130, 132, 10));

    private static MediaAssets VP8_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP8,
            new MediaAsset("swirl_128x128_vp8.webm", 128, 128),
            new MediaAsset("swirl_144x136_vp8.webm", 144, 136),
            new MediaAsset("swirl_136x144_vp8.webm", 136, 144),
            new MediaAsset("swirl_132x130_vp8.webm", 132, 130),
            new MediaAsset("swirl_130x132_vp8.webm", 130, 132));

    private static MediaAssets VP9_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_VP9,
            new MediaAsset("swirl_128x128_vp9.webm", 128, 128),
            new MediaAsset("swirl_144x136_vp9.webm", 144, 136),
            new MediaAsset("swirl_136x144_vp9.webm", 136, 144),
            new MediaAsset("swirl_132x130_vp9.webm", 132, 130),
            new MediaAsset("swirl_130x132_vp9.webm", 130, 132),
            new MediaAsset("swirl_128x128_vp9_10bit.webm", 128, 128, 10),
            new MediaAsset("swirl_144x136_vp9_10bit.webm", 144, 136, 10),
            new MediaAsset("swirl_136x144_vp9_10bit.webm", 136, 144, 10),
            new MediaAsset("swirl_132x130_vp9_10bit.webm", 132, 130, 10),
            new MediaAsset("swirl_130x132_vp9_10bit.webm", 130, 132, 10));

    private static MediaAssets AV1_ASSETS = new MediaAssets(
            MediaFormat.MIMETYPE_VIDEO_AV1,
            new MediaAsset("swirl_128x128_av1.webm", 128, 128),
            new MediaAsset("swirl_144x136_av1.webm", 144, 136),
            new MediaAsset("swirl_136x144_av1.webm", 136, 144),
            new MediaAsset("swirl_132x130_av1.webm", 132, 130),
            new MediaAsset("swirl_130x132_av1.webm", 130, 132),
            new MediaAsset("swirl_128x128_av1_10bit.webm", 128, 128, 10),
            new MediaAsset("swirl_144x136_av1_10bit.webm", 144, 136, 10),
            new MediaAsset("swirl_136x144_av1_10bit.webm", 136, 144, 10),
            new MediaAsset("swirl_132x130_av1_10bit.webm", 132, 130, 10),
            new MediaAsset("swirl_130x132_av1_10bit.webm", 130, 132, 10));

    // Frame rate of the swirl clips, used for size/rate capability checks.
    static final float SWIRL_FPS = 12.f;

    // All asset groups scanned by input() when building the parameter list.
    private static MediaAssets[] ASSETS = {H263_ASSETS, MPEG4_ASSETS, H264_ASSETS, H265_ASSETS,
            VP8_ASSETS, VP9_ASSETS, AV1_ASSETS};
329 
isColorFormatSupported(CodecCapabilities caps, int colorFormat)330    boolean isColorFormatSupported(CodecCapabilities caps, int colorFormat) {
331         for (int c : caps.colorFormats) {
332             if (c == colorFormat) {
333                 return true;
334             }
335         }
336         return false;
337     }
338 
339     @Test
decodeTest()340     public void decodeTest() throws Exception {
341         int imageFormat = ImageFormat.YUV_420_888;
342         int colorFormat = COLOR_FormatYUV420Flexible;
343         String video = mMediaAsset.getResource();
344         int width = mMediaAsset.getWidth();
345         int height = mMediaAsset.getHeight();
346 
347         if (8 == mMediaAsset.getBitDepth()) {
348             imageFormat = ImageFormat.YUV_420_888;
349             colorFormat = COLOR_FormatYUV420Flexible;
350         } else {
351             imageFormat = ImageFormat.YCBCR_P010;
352             colorFormat = COLOR_FormatYUVP010;
353         }
354 
355         if (DEBUG) {
356             Log.d(TAG, "videoDecode " + mCodecName + " " + width + "x" + height + " bit depth " +
357                     mMediaAsset.getBitDepth());
358         }
359 
360         MediaFormat mediaFormat = null;
361 
362         Preconditions.assertTestFileExists(mInpPrefix + video);
363         mExtractor.setDataSource(mInpPrefix + video);
364 
365         mediaFormat = mExtractor.getTrackFormat(0);
366         mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
367 
368         MediaCodecInfo info = mDecoder.getCodecInfo();
369         CodecCapabilities caps = info.getCapabilitiesForType(mMime);
370         VideoCapabilities videoCaps = caps.getVideoCapabilities();
371 
372         assumeTrue("Media format " + mediaFormat + " is not supported by " + mCodecName,
373                 caps.isFormatSupported(mediaFormat));
374         assumeTrue(mMediaAsset.getWidth() + "x" + mMediaAsset.getHeight() + " @ " +
375                 SWIRL_FPS + " fps is not supported by " + mCodecName,
376                 videoCaps.areSizeAndRateSupported(mMediaAsset.getWidth(),
377                 mMediaAsset.getHeight(), SWIRL_FPS));
378         assumeTrue("Color format " + colorFormat + " is not supported by " + mCodecName,
379                 isColorFormatSupported(caps, colorFormat));
380 
381         decodeFramesToImage(
382                 mDecoder, mExtractor, mediaFormat,
383                 width, height, imageFormat, mMode, mMediaAsset.getIsSwirl());
384 
385         mDecoder.stop();
386 
387     }
388 
389     private static class ImageListener implements ImageReader.OnImageAvailableListener {
390         private final LinkedBlockingQueue<Pair<Image, Exception>> mQueue =
391                 new LinkedBlockingQueue<Pair<Image, Exception>>();
392 
393         @Override
onImageAvailable(ImageReader reader)394         public void onImageAvailable(ImageReader reader) {
395             try {
396                 mQueue.put(Pair.create(reader.acquireNextImage(), null /* Exception */));
397             } catch (Exception e) {
398                 // pass any exception back to the other thread, taking the exception
399                 // here crashes the instrumentation in cts/junit.
400                 Log.e(TAG, "Can't handle Exceptions in onImageAvailable " + e);
401                 try {
402                     mQueue.put(Pair.create(null /* Image */, e));
403                 } catch (Exception e2) {
404                     // ignore the nested exception, other side will see a timeout.
405                     Log.e(TAG, "Failed to send exception info across queue: " + e2);
406                 }
407             }
408         }
409 
410         /**
411          * Get an image from the image reader.
412          *
413          * @param timeout Timeout value for the wait.
414          * @return The image from the image reader.
415          */
getImage(long timeout)416         public Image getImage(long timeout) throws InterruptedException {
417             Pair<Image,Exception> imageResult = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
418             Image image = imageResult.first;
419             Exception e = imageResult.second;
420 
421             assertNull("onImageAvailable() generated an exception: " + e, e);
422             assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
423             return image;
424         }
425     }
426 
    /**
     * Decode video frames to image reader.
     *
     * <p>Runs the synchronous MediaCodec decode loop: feeds compressed samples
     * from {@code extractor} and, for each non-empty output frame, obtains an
     * Image (either directly from the codec in MODE_IMAGE, or from the
     * ImageReader surface in MODE_IMAGEREADER) and validates it. Stops after
     * NUM_FRAME_DECODED frames or output EOS, whichever comes first.</p>
     *
     * @param decoder configured-but-not-started decoder under test.
     * @param extractor extractor already pointed at the input clip.
     * @param mediaFormat track format used to configure the decoder.
     * @param width expected crop width of each output image.
     * @param height expected crop height of each output image.
     * @param imageFormat expected android.graphics.ImageFormat of the output.
     * @param mode MODE_IMAGEREADER or MODE_IMAGE.
     * @param checkSwirl whether to run swirl color validation on each frame.
     */
    private void decodeFramesToImage(
            MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat,
            int width, int height, int imageFormat, int mode, boolean checkSwirl)
            throws InterruptedException {
        ByteBuffer[] decoderInputBuffers;
        ByteBuffer[] decoderOutputBuffers;

        // Configure decoder.
        if (VERBOSE) Log.v(TAG, "stream format: " + mediaFormat);
        if (mode == MODE_IMAGEREADER) {
            // Render to the ImageReader's surface; images arrive via mImageListener.
            createImageReader(width, height, imageFormat, MAX_NUM_IMAGES, mImageListener);
            decoder.configure(mediaFormat, mReaderSurface, null /* crypto */, 0 /* flags */);
        } else {
            assertEquals(mode, MODE_IMAGE);
            // No surface: images are fetched with decoder.getOutputImage().
            decoder.configure(mediaFormat, null /* surface */, null /* crypto */, 0 /* flags */);
        }

        decoder.start();
        decoderInputBuffers = decoder.getInputBuffers();
        decoderOutputBuffers = decoder.getOutputBuffers();
        extractor.selectTrack(0);

        // Start decoding and get Image, only test the first NUM_FRAME_DECODED frames.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int outputFrameCount = 0;
        while (!sawOutputEOS && outputFrameCount < NUM_FRAME_DECODED) {
            if (VERBOSE) Log.v(TAG, "loop:" + outputFrameCount);
            // Feed input frame.
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = decoderInputBuffers[inputBufIndex];
                    int sampleSize =
                        extractor.readSampleData(dstBuf, 0 /* offset */);

                    if (VERBOSE) Log.v(TAG, "queue a input buffer, idx/size: "
                        + inputBufIndex + "/" + sampleSize);

                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        // Negative size means the extractor is out of samples:
                        // queue an empty buffer carrying the EOS flag.
                        if (VERBOSE) Log.v(TAG, "saw input EOS.");
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            sampleSize,
                            presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Get output frame
            int res = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (VERBOSE) Log.v(TAG, "got a buffer: " + info.size + "/" + res);
            if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.v(TAG, "no output frame available");
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // decoder output buffers changed, need update.
                if (VERBOSE) Log.v(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned.
                MediaFormat outFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.v(TAG, "decoder output format changed: " + outFormat);
            } else if (res < 0) {
                // Should be decoding error.
                fail("unexpected result from decoder.dequeueOutputBuffer: " + res);
            } else {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }

                // res >= 0: normal decoding case, copy the output buffer.
                // Will use it as reference to valid the ImageReader output
                // Some decoders output a 0-sized buffer at the end. Ignore those.
                boolean doRender = (info.size != 0);

                if (doRender) {
                    outputFrameCount++;
                    String fileName = DEBUG_FILE_NAME_BASE + mCodecName + "_" + mTestId + ".yuv";

                    Image image = null;
                    try {
                        if (mode == MODE_IMAGE) {
                            image = decoder.getOutputImage(res);
                        } else {
                            // Render to the ImageReader surface, then wait for
                            // the corresponding image from the listener. res is
                            // invalidated (-1) so the buffer isn't released twice.
                            decoder.releaseOutputBuffer(res, doRender);
                            res = -1;
                            // Read image and verify
                            image = mImageListener.getImage(WAIT_FOR_IMAGE_TIMEOUT_MS);
                        }
                        validateImage(image, width, height, imageFormat, fileName);

                        if (checkSwirl) {
                            try {
                                validateSwirl(image);
                            } catch (Throwable e) {
                                // Dump the raw YUV for offline inspection, then re-throw.
                                dumpFile(fileName, getDataFromImage(image));
                                throw e;
                            }
                        }
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                if (res >= 0) {
                    decoder.releaseOutputBuffer(res, false /* render */);
                }
            }
        }
    }
557 
558     /**
559      * Validate image based on format and size.
560      *
561      * @param image The image to be validated.
562      * @param width The image width.
563      * @param height The image height.
564      * @param format The image format.
565      * @param filePath The debug dump file path, null if don't want to dump to file.
566      */
validateImage( Image image, int width, int height, int format, String filePath)567     public static void validateImage(
568             Image image, int width, int height, int format, String filePath) {
569         if (VERBOSE) {
570             Plane[] imagePlanes = image.getPlanes();
571             Log.v(TAG, "Image " + filePath + " Info:");
572             Log.v(TAG, "first plane pixelstride " + imagePlanes[0].getPixelStride());
573             Log.v(TAG, "first plane rowstride " + imagePlanes[0].getRowStride());
574             Log.v(TAG, "Image timestamp:" + image.getTimestamp());
575         }
576 
577         assertNotNull("Input image is invalid", image);
578         assertEquals("Format doesn't match", format, image.getFormat());
579         assertEquals("Width doesn't match", width, image.getCropRect().width());
580         assertEquals("Height doesn't match", height, image.getCropRect().height());
581 
582         if(VERBOSE) Log.v(TAG, "validating Image");
583         byte[] data = getDataFromImage(image);
584         assertTrue("Invalid image data", data != null && data.length > 0);
585 
586         validateYuvData(data, width, height, format, image.getTimestamp());
587 
588         if (VERBOSE && filePath != null) {
589             dumpFile(filePath, data);
590         }
591     }
592 
    /**
     * Validates the decoded swirl pattern by accumulating per-layer color
     * statistics over nested rectangles and comparing each layer's mean color
     * and standard deviation against the expected 8-bit YUV targets.
     */
    private static void validateSwirl(Image image) {
        Rect crop = image.getCropRect();
        final int NUM_SIDES = 4;
        final int step = 8;      // the width of the layers
        // rawStats[layer] holds accumulated raw pixel statistics for one layer
        // (10 values per layer, as produced by CodecUtils.getRawStats).
        long[][] rawStats = new long[NUM_SIDES][10];
        // expected colors for YUV 4:2:0 bit-depth 8
        int[][] colors = new int[][] {
            { 111, 96, 204 }, { 178, 27, 174 }, { 100, 192, 92 }, { 106, 117, 62 }
        };

        // successively accumulate statistics for each layer of the swirl
        // by using overlapping rectangles, and the observation that
        // layer_i = rectangle_i - rectangle_(i+1)
        int lastLayer = 0;
        int layer = 0;
        boolean lastLayerValid = false;
        for (int pos = 0; ; pos += step) {
            // Rectangle for this iteration, shrinking by `step` each pass;
            // loop ends once the rectangle collapses to empty.
            Rect area = new Rect(pos - step, pos, crop.width() / 2, crop.height() + 2 * step - pos);
            if (area.isEmpty()) {
                break;
            }
            area.offset(crop.left, crop.top);
            area.intersect(crop);
            for (int lr = 0; lr < 2; ++lr) {
                long[] oneStat = CodecUtils.getRawStats(image, area);
                if (VERBOSE) Log.v(TAG, "area=" + area + ", layer=" + layer + ", last="
                                    + lastLayer + ": " + Arrays.toString(oneStat));
                for (int i = 0; i < oneStat.length; i++) {
                    // Add into the current layer; subtract from the previous
                    // one so it ends up holding only its own ring's pixels.
                    rawStats[layer][i] += oneStat[i];
                    if (lastLayerValid) {
                        rawStats[lastLayer][i] -= oneStat[i];
                    }
                }
                if (VERBOSE && lastLayerValid) {
                    Log.v(TAG, "layer-" + lastLayer + ": " + Arrays.toString(rawStats[lastLayer]));
                    Log.v(TAG, Arrays.toString(CodecUtils.Raw2YUVStats(rawStats[lastLayer])));
                }
                // switch to the opposite side
                layer ^= 2;      // NUM_SIDES / 2
                lastLayer ^= 2;  // NUM_SIDES / 2
                area.offset(crop.centerX() - area.left, 2 * (crop.centerY() - area.centerY()));
            }

            lastLayer = layer;
            lastLayerValid = true;
            layer = (layer + 1) % NUM_SIDES;
        }

        for (layer = 0; layer < NUM_SIDES; ++layer) {
            // Raw2YUVStats yields means in stats[0..2] and standard deviations
            // in stats[3..5] (inferred from the checks below — TODO confirm
            // against CodecUtils).
            float[] stats = CodecUtils.Raw2YUVStats(rawStats[layer]);
            if (DEBUG) Log.d(TAG, "layer-" + layer + ": " + Arrays.toString(stats));
            if (VERBOSE) Log.v(TAG, Arrays.toString(rawStats[layer]));

            // check layer uniformity
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " is not uniform: "
                        + Arrays.toString(stats),
                        stats[3 + i] < COLOR_STDEV_ALLOWANCE);
            }

            // check layer color
            for (int i = 0; i < 3; i++) {
                assertTrue("color of layer-" + layer + " mismatches target "
                        + Arrays.toString(colors[layer]) + " vs "
                        + Arrays.toString(Arrays.copyOf(stats, 3)),
                        Math.abs(stats[i] - colors[layer][i]) < COLOR_DELTA_ALLOWANCE);
            }
        }
    }
662 
validateYuvData(byte[] yuvData, int width, int height, int format, long ts)663     private static void validateYuvData(byte[] yuvData, int width, int height, int format,
664             long ts) {
665 
666         assertTrue("YUV format must be one of the YUV_420_888, NV21, YV12 or YCBCR_P010",
667                 format == ImageFormat.YUV_420_888 ||
668                 format == ImageFormat.NV21 ||
669                 format == ImageFormat.YV12 ||
670                 format == ImageFormat.YCBCR_P010);
671 
672         if (VERBOSE) Log.v(TAG, "Validating YUV data");
673         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
674         assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);
675     }
676 
checkYuvFormat(int format)677     private static void checkYuvFormat(int format) {
678         if ((format != ImageFormat.YUV_420_888) &&
679                 (format != ImageFormat.NV21) &&
680                 (format != ImageFormat.YV12) &&
681                 (format != ImageFormat.YCBCR_P010)) {
682             fail("Wrong formats: " + format);
683         }
684     }
685     /**
686      * <p>Check android image format validity for an image, only support below formats:</p>
687      *
688      * <p>Valid formats are YUV_420_888/NV21/YV12/P010 for video decoder</p>
689      */
checkAndroidImageFormat(Image image)690     private static void checkAndroidImageFormat(Image image) {
691         int format = image.getFormat();
692         Plane[] planes = image.getPlanes();
693         switch (format) {
694             case ImageFormat.YUV_420_888:
695             case ImageFormat.NV21:
696             case ImageFormat.YV12:
697             case ImageFormat.YCBCR_P010:
698                 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
699                 break;
700             default:
701                 fail("Unsupported Image Format: " + format);
702         }
703     }
704 
705     /**
706      * Get a byte array image data from an Image object.
707      * <p>
708      * Read data from all planes of an Image into a contiguous unpadded,
709      * unpacked 1-D linear byte array, such that it can be write into disk, or
710      * accessed by software conveniently. It supports YUV_420_888/NV21/YV12/P010
711      * input Image format.
712      * </p>
713      * <p>
714      * For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
715      * the Y plane data first, followed by U(Cb), V(Cr) planes if there is any
716      * (xstride = width, ystride = height for chroma and luma components).
717      * </p>
718      */
getDataFromImage(Image image)719     private static byte[] getDataFromImage(Image image) {
720         assertNotNull("Invalid image:", image);
721         Rect crop = image.getCropRect();
722         int format = image.getFormat();
723         int width = crop.width();
724         int height = crop.height();
725         int rowStride, pixelStride;
726         byte[] data = null;
727 
728         // Read image data
729         Plane[] planes = image.getPlanes();
730         assertTrue("Fail to get image planes", planes != null && planes.length > 0);
731 
732         // Check image validity
733         checkAndroidImageFormat(image);
734 
735         ByteBuffer buffer = null;
736 
737         int offset = 0;
738         data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
739         byte[] rowData = new byte[planes[0].getRowStride()];
740         if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
741         for (int i = 0; i < planes.length; i++) {
742             int shift = (i == 0) ? 0 : 1;
743             buffer = planes[i].getBuffer();
744             assertNotNull("Fail to get bytebuffer from plane", buffer);
745             rowStride = planes[i].getRowStride();
746             pixelStride = planes[i].getPixelStride();
747             assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
748             if (VERBOSE) {
749                 Log.v(TAG, "pixelStride " + pixelStride);
750                 Log.v(TAG, "rowStride " + rowStride);
751                 Log.v(TAG, "width " + width);
752                 Log.v(TAG, "height " + height);
753             }
754             // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
755             int w = crop.width() >> shift;
756             int h = crop.height() >> shift;
757             buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
758             assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w);
759             for (int row = 0; row < h; row++) {
760                 // ImageFormat.getBitsPerPixel() returns total bits per pixel, which is 12 for
761                 // YUV 4:2:0 8-bit, whereas bytesPerPixel is for Y plane only
762                 int bytesPerPixel = (ImageFormat.getBitsPerPixel(format) * 2) / (8 * 3);
763                 int length;
764                 if (pixelStride == bytesPerPixel) {
765                     // Special case: optimized read of the entire row
766                     length = w * bytesPerPixel;
767                     buffer.get(data, offset, length);
768                     offset += length;
769                 } else {
770                     // Generic case: should work for any pixelStride but slower.
771                     // Use intermediate buffer to avoid read byte-by-byte from
772                     // DirectByteBuffer, which is very bad for performance
773                     length = (w - 1) * pixelStride + bytesPerPixel;
774                     buffer.get(rowData, 0, length);
775                     for (int col = 0; col < w; col++) {
776                         for (int bytePos = 0; bytePos < bytesPerPixel; ++bytePos) {
777                             data[offset++] = rowData[col * pixelStride + bytePos];
778                         }
779                     }
780                 }
781                 // Advance buffer the remainder of the row stride
782                 if (row < h - 1) {
783                     buffer.position(buffer.position() + rowStride - length);
784                 }
785             }
786             if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
787         }
788         return data;
789     }
790 
dumpFile(String fileName, byte[] data)791     private static void dumpFile(String fileName, byte[] data) {
792         assertNotNull("fileName must not be null", fileName);
793         assertNotNull("data must not be null", data);
794 
795         FileOutputStream outStream;
796         try {
797             Log.v(TAG, "output will be saved as " + fileName);
798             outStream = new FileOutputStream(fileName);
799         } catch (IOException ioe) {
800             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
801         }
802 
803         try {
804             outStream.write(data);
805             outStream.close();
806         } catch (IOException ioe) {
807             throw new RuntimeException("failed writing data to file " + fileName, ioe);
808         }
809     }
810 
createImageReader( int width, int height, int format, int maxNumImages, ImageReader.OnImageAvailableListener listener)811     private void createImageReader(
812             int width, int height, int format, int maxNumImages,
813             ImageReader.OnImageAvailableListener listener)  {
814         closeImageReader();
815 
816         mReader = ImageReader.newInstance(width, height, format, maxNumImages);
817         mReaderSurface = mReader.getSurface();
818         mReader.setOnImageAvailableListener(listener, mHandler);
819         if (VERBOSE) {
820             Log.v(TAG, String.format("Created ImageReader size (%dx%d), format %d", width, height,
821                     format));
822         }
823     }
824 
825     /**
826      * Close the pending images then close current active {@link ImageReader} object.
827      */
closeImageReader()828     private void closeImageReader() {
829         if (mReader != null) {
830             try {
831                 // Close all possible pending images first.
832                 Image image = mReader.acquireLatestImage();
833                 if (image != null) {
834                     image.close();
835                 }
836             } finally {
837                 mReader.close();
838                 mReader = null;
839             }
840         }
841     }
842 }
843