/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.os.ConditionVariable;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;

/**
 * <p>Basic test for the ImageReader APIs. It uses a CameraDevice as the producer; the camera
 * sends data to the Surface provided by the ImageReader. The following image formats
 * are tested:</p>
 *
 * <p>YUV_420_888: flexible YUV 4:2:0, a mandatory format for camera devices.</p>
 * <p>JPEG: used for JPEG still capture, also a mandatory format.</p>
 * <p>Some invalid-access tests.</p>
 * <p>TODO: Add more format tests?</p>
 */
public class ImageReaderTest extends Camera2AndroidTestCase {
    private static final String TAG = "ImageReaderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);

    // Number of frames (for streaming requests) to be verified.
    private static final int NUM_FRAME_VERIFIED = 2;
    // Number of frames (for streaming requests) to be verified when simulating long processing.
    private static final int NUM_LONG_PROCESS_TIME_FRAME_VERIFIED = 10;
    // The time to hold each image to simulate long processing time.
    private static final int LONG_PROCESS_TIME_MS = 300;
    // Max number of images that can be accessed simultaneously from the ImageReader.
    private static final int MAX_NUM_IMAGES = 5;
    // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
    // generous to avoid false positives due to punch/saturation operations vendors apply to the
    // JPEG outputs.
    private static final double IMAGE_DIFFERENCE_TOLERANCE = 30;

    private SimpleImageListener mListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    public void testFlexibleYuv() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.YUV_420_888, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testDepth16() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.DEPTH16, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testDepthPointCloud() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing jpeg capture for Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testLongProcessingRepeatingRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing long processing on repeating raw for camera " + id);
                openDevice(id);
                bufferFormatLongProcessingTimeTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testLongProcessingRepeatingFlexibleYuv() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing long processing on repeating YUV for camera " + id);
                openDevice(id);
                bufferFormatLongProcessingTimeTestByCamera(ImageFormat.YUV_420_888);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test invalid access of an image after the image has been closed; any further access
     * of the image should throw an IllegalStateException. The basic assumption of this test
     * is that the ImageReader always provides direct byte buffers, which is always true for
     * the camera case. If the produced image byte buffer is not a direct byte buffer, there
     * is no guarantee that this invalid access will throw an ISE.
     */
    public void testInvalidAccessTest() throws Exception {
        // Test byte buffer access after an image is released; it should throw an ISE.
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing invalid image access for Camera " + id);
                openDevice(id);
                invalidAccessTestAfterClose();
            } finally {
                closeDevice(id);
                closeDefaultImageReader();
            }
        }
    }

    /**
     * Test two image streams (YUV_420_888 and JPEG) captured by using ImageReader.
     *
     * <p>Both stream formats are mandatory for the Camera2 API.</p>
     */
    public void testYuvAndJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "YUV and JPEG testing for camera " + id);
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test two image streams (YUV_420_888 and RAW_SENSOR) captured by using ImageReader.
     */
    public void testImageReaderYuvAndRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "YUV and RAW testing for camera " + id);
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Check that the center patches of the YUV and JPEG outputs for the same frame match for
     * each supported YUV resolution and format.
     */
    public void testAllOutputYUVResolutions() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
                openDevice(id);

                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                // Only warm up the preview on LEGACY devices; other devices skip the warmup
                // captures.
                int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
                        MAX_NUM_IMAGES - 1 : 0;

                // NV21 isn't supported by ImageReader.
                final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};

                CameraCharacteristics.Key<StreamConfigurationMap> key =
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
                StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
                int[] supportedFormats = config.getOutputFormats();
                List<Integer> supportedYUVFormats = new ArrayList<>();
                for (int format : YUVFormats) {
                    if (CameraTestUtils.contains(supportedFormats, format)) {
                        supportedYUVFormats.add(format);
                    }
                }

                Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
                        StaticMetadata.StreamDirection.Output);
                assertFalse("JPEG output not supported for camera " + id +
                        ", at least one JPEG output is required.", jpegSizes.length == 0);

                Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);

                for (int format : supportedYUVFormats) {
                    Size[] targetCaptureSizes =
                            mStaticInfo.getAvailableSizesForFormatChecked(format,
                            StaticMetadata.StreamDirection.Output);

                    for (Size captureSz : targetCaptureSizes) {
                        if (VERBOSE) {
                            Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
                                    + maxJpegSize + " for camera " + mCamera.getId());
                        }

                        ImageReader jpegReader = null;
                        ImageReader yuvReader = null;
                        try {
                            // Create YUV image reader
                            SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
                            yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
                                    yuvListener);
                            Surface yuvSurface = yuvReader.getSurface();

                            // Create JPEG image reader
                            SimpleImageReaderListener jpegListener =
                                    new SimpleImageReaderListener();
                            jpegReader = createImageReader(maxJpegSize,
                                    ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
                            Surface jpegSurface = jpegReader.getSurface();

                            // Setup session
                            List<Surface> outputSurfaces = new ArrayList<Surface>();
                            outputSurfaces.add(yuvSurface);
                            outputSurfaces.add(jpegSurface);
                            createSession(outputSurfaces);

                            // Warm up camera preview (mainly to give legacy devices time to do 3A).
                            CaptureRequest.Builder warmupRequest =
                                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                            assertNotNull("Failed to get CaptureRequest.Builder", warmupRequest);
                            warmupRequest.addTarget(yuvSurface);
                            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

                            for (int i = 0; i < warmupCaptureNumber; i++) {
                                startCapture(warmupRequest.build(), /*repeating*/false,
                                        resultListener, mHandler);
                            }
                            for (int i = 0; i < warmupCaptureNumber; i++) {
                                resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
                                Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                                image.close();
                            }

                            // Capture image.
                            CaptureRequest.Builder mainRequest =
                                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                            for (Surface s : outputSurfaces) {
                                mainRequest.addTarget(s);
                            }

                            startCapture(mainRequest.build(), /*repeating*/false, resultListener,
                                    mHandler);

                            // Verify capture result and images
                            resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);

                            Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                            Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);

                            // Validate captured images.
                            CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
                                    captureSz.getHeight(), format, /*filePath*/null);
                            CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
                                    maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);

                            // Compare the image centers.
                            RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
                                    jpegImage.getHeight());
                            RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
                                    yuvImage.getHeight());

                            // Find scale difference between YUV and JPEG output
                            Matrix m = new Matrix();
                            m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
                            RectF scaledYuv = new RectF();
                            m.mapRect(scaledYuv, yuvDimens);
                            float scale = scaledYuv.width() / yuvDimens.width();

                            final int PATCH_DIMEN = 40; // pixels in YUV

                            // Find matching square patches of pixels in the YUV and JPEG outputs
                            RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
                            tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
                                    yuvDimens.centerY() - tempPatch.centerY());
                            Rect yuvPatch = new Rect();
                            tempPatch.roundOut(yuvPatch);

                            tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
                            tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
                                    jpegDimens.centerY() - tempPatch.centerY());
                            Rect jpegPatch = new Rect();
                            tempPatch.roundOut(jpegPatch);

                            // Decode center patches
                            int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
                                    yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
                            Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
                                    yuvPatch.height(), Bitmap.Config.ARGB_8888);

                            byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
                            BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
                                    compressedJpegData, /*offset*/0, compressedJpegData.length,
                                    /*isShareable*/true);
                            BitmapFactory.Options opt = new BitmapFactory.Options();
                            opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
                            Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
                            Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
                                    yuvPatch.width(), yuvPatch.height(), /*filter*/true);

                            // Compare the two patches using the average of per-pixel differences
                            double difference = BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);

                            Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
                                    difference);
                            if (difference > IMAGE_DIFFERENCE_TOLERANCE) {
                                // Dump files if running in debug mode
                                if (DEBUG) {
                                    String jpegFileName = DEBUG_FILE_NAME_BASE + "/" + captureSz +
                                            "_jpeg.jpg";
                                    dumpFile(jpegFileName, jpegBmap);
                                    String fullSizeJpegFileName = DEBUG_FILE_NAME_BASE + "/" +
                                            captureSz + "_full_jpeg.jpg";
                                    dumpFile(fullSizeJpegFileName, compressedJpegData);
                                    String yuvFileName = DEBUG_FILE_NAME_BASE + "/" + captureSz +
                                            "_yuv.jpg";
                                    dumpFile(yuvFileName, yuvBmap);
                                    String fullSizeYuvFileName = DEBUG_FILE_NAME_BASE + "/" +
                                            captureSz + "_full_yuv.jpg";
                                    int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
                                            yuvImage.getHeight(), 0, 0, yuvImage);
                                    Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
                                            yuvImage.getWidth(), yuvImage.getHeight(),
                                            Bitmap.Config.ARGB_8888);
                                    dumpFile(fullSizeYuvFileName, fullYUVBmap);
                                }
                                fail("Camera " + mCamera.getId() + ": YUV and JPEG image at " +
                                        "capture size " + captureSz + " for the same frame are " +
                                        "not similar, center patches have difference metric of " +
                                        difference);
                            }

                            // Stop capture, delete the streams.
                            stopCapture(/*fast*/false);
                        } finally {
                            closeImageReader(jpegReader);
                            jpegReader = null;
                            closeImageReader(yuvReader);
                            yuvReader = null;
                        }
                    }
                }

            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Convert a rectangular patch in a YUV image to an ARGB color array.
     *
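     * <p>For example, the test above extracts a 40x40 patch centered in the YUV image with a
     * call along the lines of {@code convertPixelYuvToRgba(40, 40, centerX - 20, centerY - 20,
     * yuvImage)} (illustrative values; the actual patch rectangle is computed above).</p>
     *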
     * @param w width of the patch.
     * @param h height of the patch.
     * @param wOffset offset of the left side of the patch.
     * @param hOffset offset of the top of the patch.
     * @param yuvImage a YUV image to select a patch from.
     * @return the image patch converted to RGB as an ARGB color array.
     */
    private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
                                               Image yuvImage) {
        final int CHANNELS = 3; // yuv
        final float COLOR_RANGE = 255f;

        assertTrue("Invalid argument to convertPixelYuvToRgba",
                w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
        assertNotNull(yuvImage);

        int imageFormat = yuvImage.getFormat();
        assertTrue("YUV image must have YUV-type format",
                imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
                        imageFormat == ImageFormat.NV21);

        int height = yuvImage.getHeight();
        int width = yuvImage.getWidth();

        Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
        Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
                /*bottom*/hOffset + h);
        assertTrue("Output rectangle " + crop + " must lie within image bounds " + imageBounds,
                imageBounds.contains(crop));
        Image.Plane[] planes = yuvImage.getPlanes();

        Image.Plane yPlane = planes[0];
        Image.Plane cbPlane = planes[1];
        Image.Plane crPlane = planes[2];

        ByteBuffer yBuf = yPlane.getBuffer();
        int yPixStride = yPlane.getPixelStride();
        int yRowStride = yPlane.getRowStride();
        ByteBuffer cbBuf = cbPlane.getBuffer();
        int cbPixStride = cbPlane.getPixelStride();
        int cbRowStride = cbPlane.getRowStride();
        ByteBuffer crBuf = crPlane.getBuffer();
        int crPixStride = crPlane.getPixelStride();
        int crRowStride = crPlane.getRowStride();

        int[] output = new int[w * h];

        // TODO: Optimize this with renderscript intrinsics
        byte[] yRow = new byte[yPixStride * w];
        byte[] cbRow = new byte[cbPixStride * w / 2];
        byte[] crRow = new byte[crPixStride * w / 2];
        yBuf.mark();
        cbBuf.mark();
        crBuf.mark();
        int initialYPos = yBuf.position();
        int initialCbPos = cbBuf.position();
        int initialCrPos = crBuf.position();
        int outputPos = 0;
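        // Walk the patch row by row: copy one luma row per iteration and, because the chroma
        // planes are subsampled by two vertically, fetch new Cb/Cr rows only on every other
        // luma row. Each pixel is then converted from YCbCr to RGB using the JFIF formula below.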
        for (int i = hOffset; i < hOffset + h; i++) {
            yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
            yBuf.get(yRow);
            if ((i & 1) == (hOffset & 1)) {
                cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
                cbBuf.get(cbRow);
                crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
                crBuf.get(crRow);
            }
            for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
                float y = yRow[yPix] & 0xFF;
                float cb = cbRow[cbPix] & 0xFF;
                float cr = crRow[crPix] & 0xFF;

                // Convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
                int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
                int g = (int) Math.max(0.0f,
                        Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
                int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));

                // Convert to ARGB pixel color (use opaque alpha)
                output[outputPos++] = Color.rgb(r, g, b);

                if ((j & 1) == 1) {
                    crPix += crPixStride;
                    cbPix += cbPixStride;
                }
            }
        }
        yBuf.rewind();
        cbBuf.rewind();
        crBuf.rewind();

        return output;
    }

    /**
     * Test capturing a stream of the given format simultaneously with a YUV stream.
     *
     * <p>Uses a fixed YUV size and varies the target format capture size. Single captures are
     * tested.</p>
     *
     * @param format The capture format to be tested along with the YUV format.
     */
    private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
        if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
                && format != ImageFormat.YUV_420_888) {
            throw new IllegalArgumentException("Unsupported format: " + format);
        }

        final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
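        // Issue fewer captures than MAX_NUM_IMAGES so neither reader runs out of buffers before
        // the queued images are drained and validated below.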
        Size maxYuvSz = mOrderedPreviewSizes.get(0);
        Size[] targetCaptureSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        for (Size captureSz : targetCaptureSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
                        + captureSz.toString() + " for camera " + mCamera.getId());
            }

            ImageReader captureReader = null;
            ImageReader yuvReader = null;
            try {
                // Create YUV image reader
                SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
                yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
                        yuvListener);
                Surface yuvSurface = yuvReader.getSurface();

                // Create capture image reader
                SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
                captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
                        captureListener);
                Surface captureSurface = captureReader.getSurface();

                // Capture images.
                List<Surface> outputSurfaces = new ArrayList<Surface>();
                outputSurfaces.add(yuvSurface);
                outputSurfaces.add(captureSurface);
                CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
                        CameraDevice.TEMPLATE_PREVIEW);
                SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

                for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
                    startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
                }

                // Verify capture result and images
                for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
                    resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the capture result back for " + i + "th capture");
                    }

                    Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the yuv image back for " + i + "th capture");
                    }

                    Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the capture image back for " + i + "th capture");
                    }

                    // Validate captured images.
                    CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
                            maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
                    CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
                            captureSz.getHeight(), format, /*filePath*/null);
                }

                // Stop capture, delete the streams.
                stopCapture(/*fast*/false);
            } finally {
                closeImageReader(captureReader);
                captureReader = null;
                closeImageReader(yuvReader);
                yuvReader = null;
            }
        }
    }

    private void invalidAccessTestAfterClose() throws Exception {
        final int FORMAT = mStaticInfo.isColorOutputSupported() ?
            ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
                StaticMetadata.StreamDirection.Output);
        Image img = null;
        // Create ImageReader.
        mListener = new SimpleImageListener();
        createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);

        // Start capture.
        CaptureRequest request = prepareCaptureRequest();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();
        startCapture(request, /* repeating */false, listener, mHandler);

        mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
        img = mReader.acquireNextImage();
        Plane firstPlane = img.getPlanes()[0];
        ByteBuffer buffer = firstPlane.getBuffer();
        img.close();

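        // Now that the image is closed, any further access to the image, its plane, or the
        // plane's byte buffer is expected to throw an IllegalStateException.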
        imageInvalidAccessTestAfterClose(img, firstPlane, buffer);
    }

    private void bufferFormatTestByCamera(int format, boolean repeating) throws Exception {

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        // For each resolution, test the ImageReader:
        for (Size sz : availableSizes) {
            try {
                if (VERBOSE) {
                    Log.v(TAG, "Testing size " + sz.toString() + " format " + format
                            + " for camera " + mCamera.getId());
                }

                // Create ImageReader.
                mListener = new SimpleImageListener();
                createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);

                // Start capture.
                CaptureRequest request = prepareCaptureRequest();
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(request, repeating, listener, mHandler);

                int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;

                // Validate images.
                validateImage(sz, format, numFrameVerified, repeating);

                // Validate capture result.
                validateCaptureResult(format, sz, listener, numFrameVerified);

                // Stop capture.
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }

        }
    }

    private void bufferFormatLongProcessingTimeTestByCamera(int format)
            throws Exception {

        final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204);
        final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000);
        final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000;

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        // For each resolution, test the ImageReader:
        for (Size sz : availableSizes) {
            Log.v(TAG, "testing size " + sz.toString());
            try {
                if (VERBOSE) {
                    Log.v(TAG, "Testing long processing time: size " + sz.toString() + " format " +
                            format + " for camera " + mCamera.getId());
                }

                // Create ImageReader.
                mListener = new SimpleImageListener();
                createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);

                // Set fully manual controls so the capture results can be checked against the
                // requested values below.
                List<Surface> outputSurfaces = new ArrayList<Surface>();
                outputSurfaces.add(mReader.getSurface());
                CaptureRequest.Builder requestBuilder = prepareCaptureRequestForSurfaces(
                        outputSurfaces, CameraDevice.TEMPLATE_STILL_CAPTURE);

                requestBuilder.set(
                        CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
                requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
                requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_OFF);
                requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
                        CaptureRequest.CONTROL_AWB_MODE_OFF);
                requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE);
                requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS);

                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(requestBuilder.build(), /*repeating*/true, listener, mHandler);

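                // For each verified frame, hold the acquired image for LONG_PROCESS_TIME_MS to
                // simulate a slow consumer, while checking that the image and its capture result
                // still match the manual settings requested above.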
                for (int i = 0; i < NUM_LONG_PROCESS_TIME_FRAME_VERIFIED; i++) {
                    mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);

                    // Verify image.
                    Image img = mReader.acquireNextImage();
                    assertNotNull("Unable to acquire next image", img);
                    CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                            DEBUG_FILE_NAME_BASE);

                    // Verify the exposure time and iso match the requested values.
                    CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

                    long exposureTimeDiff = TEST_EXPOSURE_TIME_NS -
                            getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                    int sensitivityDiff = TEST_SENSITIVITY_VALUE -
                            getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);

                    mCollector.expectTrue(
                            String.format("Long processing frame %d format %d size %s " +
                                    "exposure time was %d expecting %d.", i, format, sz.toString(),
                                    getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME),
                                    TEST_EXPOSURE_TIME_NS),
                            exposureTimeDiff < EXPOSURE_TIME_ERROR_MARGIN_NS &&
                            exposureTimeDiff >= 0);

                    mCollector.expectTrue(
                            String.format("Long processing frame %d format %d size %s " +
                                    "sensitivity was %d expecting %d.", i, format, sz.toString(),
                                    getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY),
                                    TEST_SENSITIVITY_VALUE),
                            sensitivityDiff == 0);

                    // Sleep to simulate long processing before closing the image.
                    Thread.sleep(LONG_PROCESS_TIME_MS);
                    img.close();
                }
                // Stop capture.
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }
        }
    }

    /**
     * Validate capture results.
     *
     * @param format The format of this capture.
     * @param size The capture size.
     * @param listener The capture listener to get capture result callbacks.
     * @param numFrameVerified The number of capture results to be verified.
     */
    private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
            int numFrameVerified) {
        for (int i = 0; i < numFrameVerified; i++) {
            CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

            // TODO: Update this to use availableResultKeys once shim supports this.
            if (mStaticInfo.isCapabilitySupported(
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
                Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
                mCollector.expectInRange(
                        String.format(
                                "Capture for format %d, size %s exposure time is invalid.",
                                format, size.toString()),
                        exposureTime,
                        mStaticInfo.getExposureMinimumOrDefault(),
                        mStaticInfo.getExposureMaximumOrDefault()
                );
                mCollector.expectInRange(
                        String.format("Capture for format %d, size %s sensitivity is invalid.",
                                format, size.toString()),
                        sensitivity,
                        mStaticInfo.getSensitivityMinimumOrDefault(),
                        mStaticInfo.getSensitivityMaximumOrDefault()
                );
            }
            // TODO: add more key validations.
        }
    }

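    /**
     * Image listener that signals a {@link ConditionVariable} whenever a new image becomes
     * available on the default ImageReader, so tests can block until at least one image is ready.
     */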
    private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
        private final ConditionVariable imageAvailable = new ConditionVariable();

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (mReader != reader) {
                return;
            }

            if (VERBOSE) Log.v(TAG, "new image available");
            imageAvailable.open();
        }

        public void waitForAnyImageAvailable(long timeout) {
            if (imageAvailable.block(timeout)) {
                imageAvailable.close();
            } else {
                fail("wait for image available timed out after " + timeout + "ms");
            }
        }

        public void closePendingImages() {
            Image image = mReader.acquireLatestImage();
            if (image != null) {
                image.close();
            }
        }
    }

    private void validateImage(Size sz, int format, int captureCount, boolean repeating)
            throws Exception {
        // TODO: Add more format here, and wrap each one as a function.
        Image img;
        final int MAX_RETRY_COUNT = 20;
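        // Maximum number of times to retry acquireLatestImage() when it returns null before
        // declaring a failure (see the retry logic in the loop below).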
        int numImageVerified = 0;
        int reTryCount = 0;
        while (numImageVerified < captureCount) {
            assertNotNull("Image listener is null", mListener);
            if (VERBOSE) Log.v(TAG, "Waiting for an Image");
            mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
            if (repeating) {
                /**
                 * Acquire the latest image in case the validation is slower than
                 * the image producing rate.
                 */
                img = mReader.acquireLatestImage();
                /**
                 * Sometimes, if multiple onImageAvailable callbacks are queued,
                 * acquireLatestImage will clear all buffers before the corresponding callbacks
                 * are executed. Wait for a new frame in that case.
                 */
                if (img == null && reTryCount < MAX_RETRY_COUNT) {
                    reTryCount++;
                    continue;
                }
            } else {
                img = mReader.acquireNextImage();
            }
            assertNotNull("Unable to acquire the latest image", img);
            if (VERBOSE) Log.v(TAG, "Got the latest image");
            CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                    DEBUG_FILE_NAME_BASE);
            if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
            img.close();
            numImageVerified++;
            reTryCount = 0;
        }

        // Return all pending images to the ImageReader, as validateImage may
        // take a while to return and there could be many images pending.
        mListener.closePendingImages();
    }
}