1 /*
2  * Copyright 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import android.content.Context;
20 import android.graphics.Bitmap;
21 import android.graphics.BitmapFactory;
22 import android.graphics.BitmapRegionDecoder;
23 import android.graphics.Canvas;
24 import android.graphics.Color;
25 import android.graphics.ImageFormat;
26 import android.graphics.Matrix;
27 import android.graphics.PixelFormat;
28 import android.graphics.Rect;
29 import android.graphics.RectF;
30 import android.hardware.HardwareBuffer;
31 import android.hardware.camera2.CameraCharacteristics;
32 import android.hardware.camera2.CameraDevice;
33 import android.hardware.camera2.CaptureRequest;
34 import android.hardware.camera2.CaptureResult;
35 import android.hardware.camera2.cts.CameraTestUtils.ImageDropperListener;
36 import android.hardware.camera2.cts.helpers.StaticMetadata;
37 import android.hardware.camera2.cts.rs.BitmapUtils;
38 import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
39 import android.hardware.camera2.params.OutputConfiguration;
40 import android.hardware.camera2.params.StreamConfigurationMap;
41 import android.media.Image;
42 import android.media.Image.Plane;
43 import android.media.ImageReader;
44 import android.os.ConditionVariable;
45 import android.util.Log;
46 import android.util.Size;
47 import android.view.Surface;
48 
49 import com.android.ex.camera2.blocking.BlockingSessionCallback;
50 
51 import java.nio.ByteBuffer;
52 import java.util.ArrayList;
53 import java.util.Arrays;
54 import java.util.List;
55 import java.util.Set;
56 
57 import org.junit.runner.RunWith;
58 import org.junit.runners.Parameterized;
59 import org.junit.Test;
60 
61 import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
62 import static android.hardware.camera2.cts.CameraTestUtils.SESSION_READY_TIMEOUT_MS;
63 import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
64 import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
65 import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
66 import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;
67 import static com.google.common.truth.Truth.assertWithMessage;
68 import static junit.framework.Assert.*;
69 
70 /**
71  * <p>Basic test for ImageReader APIs. It uses a CameraDevice as the producer; the camera
72  * sends data to the Surface provided by the ImageReader. The image formats below
73  * are tested:</p>
74  *
75  * <p>YUV_420_888: flexible YUV420, a mandatory format for the camera. </p>
76  * <p>JPEG: used for JPEG still capture, also a mandatory format. </p>
77  * <p>Some invalid access tests. </p>
78  * <p>TODO: Add more format tests? </p>
79  */
80 @RunWith(Parameterized.class)
81 public class ImageReaderTest extends Camera2AndroidTestCase {
82     private static final String TAG = "ImageReaderTest";
83     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
84     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
85 
86     // Number of frames (for streaming requests) to be verified.
87     private static final int NUM_FRAME_VERIFIED = 2;
88     // Number of frames (for streaming requests) to be verified when simulating long processing times.
89     private static final int NUM_LONG_PROCESS_TIME_FRAME_VERIFIED = 10;
90     // The time to hold each image to simulate a long processing time.
91     private static final int LONG_PROCESS_TIME_MS = 300;
92     // Max number of images that can be accessed simultaneously from an ImageReader.
93     private static final int MAX_NUM_IMAGES = 5;
94     // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
95     // generous to avoid false positives due to punch/saturation operations vendors apply to the
96     // JPEG outputs.
97     private static final double IMAGE_DIFFERENCE_TOLERANCE = 40;
98     // Legacy level devices need an even larger tolerance because JPEG and YUV outputs are not
99     // captured from the same frame in legacy mode.
100     private static final double IMAGE_DIFFERENCE_TOLERANCE_LEGACY = 60;
101 
102     private SimpleImageListener mListener;
103 
104     @Override
105     public void setUp() throws Exception {
106         super.setUp();
107     }
108 
109     @Override
110     public void tearDown() throws Exception {
111         super.tearDown();
112     }
113 
114     @Test
115     public void testFlexibleYuv() throws Exception {
116         for (String id : mCameraIdsUnderTest) {
117             try {
118                 Log.i(TAG, "Testing Camera " + id);
119                 openDevice(id);
120                 bufferFormatTestByCamera(ImageFormat.YUV_420_888, /*repeating*/true);
121             } finally {
122                 closeDevice(id);
123             }
124         }
125     }
126 
127     @Test
128     public void testDepth16() throws Exception {
129         for (String id : mCameraIdsUnderTest) {
130             try {
131                 Log.i(TAG, "Testing Camera " + id);
132                 openDevice(id);
133                 bufferFormatTestByCamera(ImageFormat.DEPTH16, /*repeating*/true);
134             } finally {
135                 closeDevice(id);
136             }
137         }
138     }
139 
140     @Test
141     public void testDepthPointCloud() throws Exception {
142         for (String id : mCameraIdsUnderTest) {
143             try {
144                 Log.i(TAG, "Testing Camera " + id);
145                 openDevice(id);
146                 bufferFormatTestByCamera(ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
147             } finally {
148                 closeDevice(id);
149             }
150         }
151     }
152 
153     @Test
154     public void testDynamicDepth() throws Exception {
155         for (String id : mCameraIdsUnderTest) {
156             try {
157                 openDevice(id);
158                 bufferFormatTestByCamera(ImageFormat.DEPTH_JPEG, /*repeating*/true,
159                         /*checkSession*/ true);
160             } finally {
161                 closeDevice(id);
162             }
163         }
164     }
165 
166     @Test
167     public void testY8() throws Exception {
168         for (String id : mCameraIdsUnderTest) {
169             try {
170                 Log.i(TAG, "Testing Camera " + id);
171                 openDevice(id);
172                 bufferFormatTestByCamera(ImageFormat.Y8, /*repeating*/true);
173             } finally {
174                 closeDevice(id);
175             }
176         }
177     }
178 
179     @Test
180     public void testJpeg() throws Exception {
181         for (String id : mCameraIdsUnderTest) {
182             try {
183                 Log.v(TAG, "Testing jpeg capture for Camera " + id);
184                 openDevice(id);
185                 bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/false);
186             } finally {
187                 closeDevice(id);
188             }
189         }
190     }
191 
192     @Test
193     public void testRaw() throws Exception {
194         for (String id : mCameraIdsUnderTest) {
195             try {
196                 Log.v(TAG, "Testing raw capture for camera " + id);
197                 openDevice(id);
198 
199                 bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/false);
200             } finally {
201                 closeDevice(id);
202             }
203         }
204     }
205 
206     @Test
207     public void testRawPrivate() throws Exception {
208         for (String id : mCameraIdsUnderTest) {
209             try {
210                 Log.v(TAG, "Testing raw capture for camera " + id);
211                 openDevice(id);
212 
213                 bufferFormatTestByCamera(ImageFormat.RAW_PRIVATE, /*repeating*/false);
214             } finally {
215                 closeDevice(id);
216             }
217         }
218     }
219 
220     @Test
221     public void testP010() throws Exception {
222         for (String id : mCameraIdsUnderTest) {
223             try {
224                 Log.v(TAG, "Testing YUV P010 capture for Camera " + id);
225                 openDevice(id);
226                 bufferFormatTestByCamera(ImageFormat.YCBCR_P010, /*repeating*/false);
227             } finally {
228                 closeDevice(id);
229             }
230         }
231     }
232 
233     @Test
234     public void testHeic() throws Exception {
235         for (String id : mCameraIdsUnderTest) {
236             try {
237                 Log.v(TAG, "Testing heic capture for Camera " + id);
238                 openDevice(id);
239                 bufferFormatTestByCamera(ImageFormat.HEIC, /*repeating*/false);
240             } finally {
241                 closeDevice(id);
242             }
243         }
244     }
245 
246     @Test
247     public void testRepeatingJpeg() throws Exception {
248         for (String id : mCameraIdsUnderTest) {
249             try {
250                 Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
251                 openDevice(id);
252                 bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/true);
253             } finally {
254                 closeDevice(id);
255             }
256         }
257     }
258 
259     @Test
260     public void testRepeatingRaw() throws Exception {
261         for (String id : mCameraIdsUnderTest) {
262             try {
263                 Log.v(TAG, "Testing repeating raw capture for camera " + id);
264                 openDevice(id);
265 
266                 bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/true);
267             } finally {
268                 closeDevice(id);
269             }
270         }
271     }
272 
273     @Test
274     public void testRepeatingRawPrivate() throws Exception {
275         for (String id : mCameraIdsUnderTest) {
276             try {
277                 Log.v(TAG, "Testing repeating raw capture for camera " + id);
278                 openDevice(id);
279 
280                 bufferFormatTestByCamera(ImageFormat.RAW_PRIVATE, /*repeating*/true);
281             } finally {
282                 closeDevice(id);
283             }
284         }
285     }
286 
287     @Test
288     public void testRepeatingHeic() throws Exception {
289         for (String id : mCameraIdsUnderTest) {
290             try {
291                 Log.v(TAG, "Testing repeating heic capture for Camera " + id);
292                 openDevice(id);
293                 bufferFormatTestByCamera(ImageFormat.HEIC, /*repeating*/true);
294             } finally {
295                 closeDevice(id);
296             }
297         }
298     }
299 
300     @Test
301     public void testLongProcessingRepeatingRaw() throws Exception {
302         for (String id : mCameraIdsUnderTest) {
303             try {
304                 Log.v(TAG, "Testing long processing on repeating raw for camera " + id);
305 
306                 if (!mAllStaticInfo.get(id).isCapabilitySupported(
307                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
308                     continue;
309                 }
310                 openDevice(id);
311 
312                 bufferFormatLongProcessingTimeTestByCamera(ImageFormat.RAW_SENSOR);
313             } finally {
314                 closeDevice(id);
315             }
316         }
317     }
318 
319     @Test
320     public void testLongProcessingRepeatingFlexibleYuv() throws Exception {
321         for (String id : mCameraIdsUnderTest) {
322             try {
323                 Log.v(TAG, "Testing long processing on repeating YUV for camera " + id);
324 
325                 if (!mAllStaticInfo.get(id).isCapabilitySupported(
326                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
327                     continue;
328                 }
329 
330                 openDevice(id);
331                 bufferFormatLongProcessingTimeTestByCamera(ImageFormat.YUV_420_888);
332             } finally {
333                 closeDevice(id);
334             }
335         }
336     }
337 
338     /**
339      * Test invalid access of an image after the image has been closed: further access
340      * of the image should throw an IllegalStateException. The basic assumption of
341      * this test is that the ImageReader always returns direct byte buffers, which is always true
342      * for the camera case. If the produced image byte buffer is not a direct byte buffer, there
343      * is no guarantee that this invalid access will throw an ISE.
344      */
345     @Test
346     public void testInvalidAccessTest() throws Exception {
347         // Test byte buffer access after an image is released; it should throw an ISE.
348         for (String id : mCameraIdsUnderTest) {
349             try {
350                 Log.v(TAG, "Testing invalid image access for Camera " + id);
351                 openDevice(id);
352                 invalidAccessTestAfterClose();
353             } finally {
354                 closeDevice(id);
355                 closeDefaultImageReader();
356             }
357         }
358     }
359 
360     /**
361      * Test capture of two image streams (YUV_420_888 and JPEG) using ImageReader.
362      *
363      * <p>Both stream formats are mandatory for the Camera2 API.</p>
364      */
365     @Test
366     public void testYuvAndJpeg() throws Exception {
367         for (String id : mCameraIdsUnderTest) {
368             try {
369                 Log.v(TAG, "YUV and JPEG testing for camera " + id);
370                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
371                     Log.i(TAG, "Camera " + id +
372                             " does not support color outputs, skipping");
373                     continue;
374                 }
375                 openDevice(id);
376                 bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
377             } finally {
378                 closeDevice(id);
379             }
380         }
381     }
382 
383     /**
384      * Test capture of two image streams (YUV_420_888 and JPEG) using an ImageReader created
385      * with the factory method that takes a usage flag argument.
386      *
387      * <p>Both stream formats are mandatory for the Camera2 API.</p>
388      */
389     @Test
390     public void testYuvAndJpegWithUsageFlag() throws Exception {
391         for (String id : mCameraIdsUnderTest) {
392             try {
393                 Log.v(TAG, "YUV and JPEG testing for camera " + id);
394                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
395                     Log.i(TAG, "Camera " + id +
396                             " does not support color outputs, skipping");
397                     continue;
398                 }
399                 openDevice(id);
400                 bufferFormatWithYuvTestByCamera(ImageFormat.JPEG, true);
401             } finally {
402                 closeDevice(id);
403             }
404         }
405     }
406 
407     /**
408      * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) using ImageReader.
409      *
410      */
411     @Test
412     public void testImageReaderYuvAndRaw() throws Exception {
413         for (String id : mCameraIdsUnderTest) {
414             try {
415                 Log.v(TAG, "YUV and RAW testing for camera " + id);
416                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
417                     Log.i(TAG, "Camera " + id +
418                             " does not support color outputs, skipping");
419                     continue;
420                 }
421                 openDevice(id);
422                 bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
423             } finally {
424                 closeDevice(id);
425             }
426         }
427     }
428 
429     /**
430      * If the camera device advertises the SECURE_IMAGE_DATA capability, test
431      * ImageFormat.PRIVATE + PROTECTED usage capture using an ImageReader created with the
432      * factory method that takes a usage flag argument, passing a custom usage flag.
433      */
434     @Test
435     public void testImageReaderPrivateWithProtectedUsageFlag() throws Exception {
436         for (String id : mCameraIdsUnderTest) {
437             try {
438                 Log.v(TAG, "Private format and protected usage testing for camera " + id);
439                 List<String> testCameraIds = new ArrayList<>();
440 
441                 if (mAllStaticInfo.get(id).isCapabilitySupported(
442                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)) {
443                     // Test the camera id without using physical camera
444                     testCameraIds.add(null);
445                 }
446 
447                 if (mAllStaticInfo.get(id).isLogicalMultiCamera()) {
448                     Set<String> physicalIdsSet =
449                         mAllStaticInfo.get(id).getCharacteristics().getPhysicalCameraIds();
450                     for (String physicalId : physicalIdsSet) {
451                         if (mAllStaticInfo.get(physicalId).isCapabilitySupported(
452                                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)) {
453                             testCameraIds.add(physicalId);
454                         }
455                     }
456                 }
457 
458                 if (testCameraIds.isEmpty()) {
459                     Log.i(TAG, "Camera " + id +
460                             " does not support secure image data capability, skipping");
461                     continue;
462                 }
463                 openDevice(id);
464 
465                 for (String testCameraId : testCameraIds) {
466                     bufferFormatTestByCamera(ImageFormat.PRIVATE, /*setUsageFlag*/ true,
467                             HardwareBuffer.USAGE_PROTECTED_CONTENT, /*repeating*/ true,
468                             /*checkSession*/ true, /*validateImageData*/ false,
469                             testCameraId);
470                 }
471             } finally {
472                 closeDevice(id);
473             }
474         }
475     }
476 
477     /**
478      * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) using an ImageReader
479      * created with the factory method that takes a usage flag argument.
480      *
481      */
482     @Test
483     public void testImageReaderYuvAndRawWithUsageFlag() throws Exception {
484         for (String id : mCameraIdsUnderTest) {
485             try {
486                 Log.v(TAG, "YUV and RAW testing for camera " + id);
487                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
488                     Log.i(TAG, "Camera " + id +
489                             " does not support color outputs, skipping");
490                     continue;
491                 }
492                 openDevice(id);
493                 bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR, true);
494             } finally {
495                 closeDevice(id);
496             }
497         }
498     }
499 
500     /**
501      * Check that the center patches for YUV and JPEG outputs for the same frame match for each YUV
502      * resolution and format supported.
503      */
504     @Test
505     public void testAllOutputYUVResolutions() throws Exception {
506         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
507                 BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
508         for (String id : mCameraIdsUnderTest) {
509             try {
510                 Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
511 
512                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
513                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
514                     continue;
515                 }
516 
517                 openDevice(id);
518                 // Only warm up the capture pipeline on LEGACY devices; skip warmup otherwise.
519                 int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
520                         MAX_NUM_IMAGES - 1 : 0;
521 
522                 // NV21 isn't supported by ImageReader.
523                 final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};
524 
525                 CameraCharacteristics.Key<StreamConfigurationMap> key =
526                         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
527                 StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
528                 int[] supportedFormats = config.getOutputFormats();
529                 List<Integer> supportedYUVFormats = new ArrayList<>();
530                 for (int format : YUVFormats) {
531                     if (CameraTestUtils.contains(supportedFormats, format)) {
532                         supportedYUVFormats.add(format);
533                     }
534                 }
535 
536                 Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
537                         StaticMetadata.StreamDirection.Output);
538                 assertFalse("JPEG output not supported for camera " + id +
539                         ", at least one JPEG output is required.", jpegSizes.length == 0);
540 
541                 Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
542                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
543                 Size QCIF = new Size(176, 144);
544                 Size FULL_HD = new Size(1920, 1080);
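                // QCIF and 1080p are referenced below when deciding whether a failed session
                // configuration is an allowed stream combination rather than a test failure.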
545                 for (int format : supportedYUVFormats) {
546                     Size[] targetCaptureSizes =
547                             mStaticInfo.getAvailableSizesForFormatChecked(format,
548                             StaticMetadata.StreamDirection.Output);
549 
550                     for (Size captureSz : targetCaptureSizes) {
551                         if (VERBOSE) {
552                             Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
553                                     + maxJpegSize + " for camera " + mCamera.getId());
554                         }
555 
556                         ImageReader jpegReader = null;
557                         ImageReader yuvReader = null;
558                         try {
559                             // Create YUV image reader
560                             SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
561                             yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
562                                     yuvListener);
563                             Surface yuvSurface = yuvReader.getSurface();
564 
565                             // Create JPEG image reader
566                             SimpleImageReaderListener jpegListener =
567                                     new SimpleImageReaderListener();
568                             jpegReader = createImageReader(maxJpegSize,
569                                     ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
570                             Surface jpegSurface = jpegReader.getSurface();
571 
572                             // Setup session
573                             List<Surface> outputSurfaces = new ArrayList<Surface>();
574                             outputSurfaces.add(yuvSurface);
575                             outputSurfaces.add(jpegSurface);
576                             createSession(outputSurfaces);
577 
578                             int state = mCameraSessionListener.getStateWaiter().waitForAnyOfStates(
579                                         Arrays.asList(sessionStates),
580                                         CameraTestUtils.SESSION_CONFIGURE_TIMEOUT_MS);
581 
582                             if (state == BlockingSessionCallback.SESSION_CONFIGURE_FAILED) {
583                                 if (captureSz.getWidth() > maxPreviewSize.getWidth() ||
584                                         captureSz.getHeight() > maxPreviewSize.getHeight()) {
585                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
586                                             + " ,jpeg:" + maxJpegSize + "} for camera "
587                                             + mCamera.getId() +
588                                             " because full size jpeg + yuv larger than "
589                                             + "max preview size (" + maxPreviewSize
590                                             + ") is not supported");
591                                     continue;
592                                 } else if (captureSz.equals(QCIF) &&
593                                         ((maxJpegSize.getWidth() > FULL_HD.getWidth()) ||
594                                          (maxJpegSize.getHeight() > FULL_HD.getHeight()))) {
595                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
596                                             + " ,jpeg:" + maxJpegSize + "} for camera "
597                                             + mCamera.getId() +
598                                             " because QCIF + >Full_HD size is not supported");
599                                     continue;
600                                 } else {
601                                     fail("Camera " + mCamera.getId() +
602                                             ":session configuration failed for {jpeg: " +
603                                             maxJpegSize + ", yuv: " + captureSz + "}");
604                                 }
605                             }
606 
607                             // Warm up camera preview (mainly to give legacy devices time to do 3A).
608                             CaptureRequest.Builder warmupRequest =
609                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
610                             assertNotNull("Failed to get CaptureRequest.Builder", warmupRequest);
611                             warmupRequest.addTarget(yuvSurface);
612                             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
613 
614                             for (int i = 0; i < warmupCaptureNumber; i++) {
615                                 startCapture(warmupRequest.build(), /*repeating*/false,
616                                         resultListener, mHandler);
617                             }
618                             for (int i = 0; i < warmupCaptureNumber; i++) {
619                                 resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
620                                 Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
621                                 image.close();
622                             }
623 
624                             // Capture image.
625                             CaptureRequest.Builder mainRequest =
626                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
627                             for (Surface s : outputSurfaces) {
628                                 mainRequest.addTarget(s);
629                             }
630 
631                             startCapture(mainRequest.build(), /*repeating*/false, resultListener,
632                                     mHandler);
633 
634                             // Verify capture result and images
635                             resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
636 
637                             Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
638                             Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
639 
640                             // Validate captured images.
641                             CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
642                                     captureSz.getHeight(), format, /*filePath*/null);
643                             CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
644                                     maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);
645 
646                             // Compare the image centers.
647                             RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
648                                     jpegImage.getHeight());
649                             RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
650                                     yuvImage.getHeight());
651 
652                             // Find scale difference between YUV and JPEG output
653                             Matrix m = new Matrix();
654                             m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
655                             RectF scaledYuv = new RectF();
656                             m.mapRect(scaledYuv, yuvDimens);
657                             float scale = scaledYuv.width() / yuvDimens.width();
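                            // 'scale' maps YUV pixel dimensions onto the JPEG output; it is used
                            // below to pick a JPEG patch covering the same field of view as the
                            // YUV patch.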
658 
659                             final int PATCH_DIMEN = 40; // pixels in YUV
660 
661                             // Find matching square patch of pixels in YUV and JPEG output
662                             RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
663                             tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
664                                     yuvDimens.centerY() - tempPatch.centerY());
665                             Rect yuvPatch = new Rect();
666                             tempPatch.roundOut(yuvPatch);
667 
668                             tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
669                             tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
670                                     jpegDimens.centerY() - tempPatch.centerY());
671                             Rect jpegPatch = new Rect();
672                             tempPatch.roundOut(jpegPatch);
673 
674                             // Decode center patches
675                             int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
676                                     yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
677                             Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
678                                     yuvPatch.height(), Bitmap.Config.ARGB_8888);
679 
680                             byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
681                             BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
682                                     compressedJpegData, /*offset*/0, compressedJpegData.length,
683                                     /*isShareable*/true);
684                             BitmapFactory.Options opt = new BitmapFactory.Options();
685                             opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
686                             Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
687                             Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
688                                     yuvPatch.width(), yuvPatch.height(), /*filter*/true);
689 
690                             // Compare two patches using average of per-pixel differences
691                             double difference = BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);
692                             double tolerance = IMAGE_DIFFERENCE_TOLERANCE;
693                             if (mStaticInfo.isHardwareLevelLegacy()) {
694                                 tolerance = IMAGE_DIFFERENCE_TOLERANCE_LEGACY;
695                             }
696                             Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
697                                     difference);
698                             if (difference > tolerance) {
699                                 // Dump files if running in debug mode
700                                 if (DEBUG) {
701                                     String jpegFileName = mDebugFileNameBase + "/" + captureSz +
702                                             "_jpeg.jpg";
703                                     dumpFile(jpegFileName, jpegBmap);
704                                     String fullSizeJpegFileName = mDebugFileNameBase + "/" +
705                                             captureSz + "_full_jpeg.jpg";
706                                     dumpFile(fullSizeJpegFileName, compressedJpegData);
707                                     String yuvFileName = mDebugFileNameBase + "/" + captureSz +
708                                             "_yuv.jpg";
709                                     dumpFile(yuvFileName, yuvBmap);
710                                     String fullSizeYuvFileName = mDebugFileNameBase + "/" +
711                                             captureSz + "_full_yuv.jpg";
712                                     int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
713                                             yuvImage.getHeight(), 0, 0, yuvImage);
714                                     Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
715                                             yuvImage.getWidth(), yuvImage.getHeight(),
716                                             Bitmap.Config.ARGB_8888);
717                                     dumpFile(fullSizeYuvFileName, fullYUVBmap);
718                                 }
719                                 fail("Camera " + mCamera.getId() + ": YUV and JPEG image at " +
720                                         "capture size " + captureSz + " for the same frame are " +
721                                         "not similar, center patches have difference metric of " +
722                                         difference + ", tolerance is " + tolerance);
723                             }
724 
725                             // Stop capture, delete the streams.
726                             stopCapture(/*fast*/false);
727                             yuvImage.close();
728                             jpegImage.close();
729                             yuvListener.drain();
730                             jpegListener.drain();
731                         } finally {
732                             closeImageReader(jpegReader);
733                             jpegReader = null;
734                             closeImageReader(yuvReader);
735                             yuvReader = null;
736                         }
737                     }
738                 }
739 
740             } finally {
741                 closeDevice(id);
742             }
743         }
744     }
745 
746     /**
747      * Test that images captured after discarding free buffers are valid.
748      */
749     @Test
750     public void testDiscardFreeBuffers() throws Exception {
751         for (String id : mCameraIdsUnderTest) {
752             try {
753                 Log.v(TAG, "Testing discardFreeBuffers for Camera " + id);
754                 openDevice(id);
755                 discardFreeBuffersTestByCamera();
756             } finally {
757                 closeDevice(id);
758             }
759         }
760     }
761 
762     /** Tests that usage bits are preserved */
763     @Test
764     public void testUsageRespected() throws Exception {
765         final long REQUESTED_USAGE_BITS =
766                 HardwareBuffer.USAGE_GPU_COLOR_OUTPUT | HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE;
767         ImageReader reader = ImageReader.newInstance(1, 1, PixelFormat.RGBA_8888, 1,
768                 REQUESTED_USAGE_BITS);
769         Surface surface = reader.getSurface();
770         Canvas canvas = surface.lockHardwareCanvas();
771         canvas.drawColor(Color.RED);
772         surface.unlockCanvasAndPost(canvas);
773         Image image = null;
774         for (int i = 0; i < 100; i++) {
775             image = reader.acquireNextImage();
776             if (image != null) break;
777             Thread.sleep(10);
778         }
779         assertNotNull(image);
780         HardwareBuffer buffer = image.getHardwareBuffer();
781         assertNotNull(buffer);
782         // Mask off the upper vendor bits
783         int myBits = (int) (buffer.getUsage() & 0xFFFFFFF);
784         assertWithMessage("Usage bits %s did not contain requested usage bits %s", myBits,
785                 REQUESTED_USAGE_BITS).that(myBits & REQUESTED_USAGE_BITS)
786                         .isEqualTo(REQUESTED_USAGE_BITS);
787     }
788 
789     /**
790      * Convert a rectangular patch in a YUV image to an ARGB color array.
791      *
792      * @param w width of the patch.
793      * @param h height of the patch.
794      * @param wOffset offset of the left side of the patch.
795      * @param hOffset offset of the top of the patch.
796      * @param yuvImage a YUV image to select a patch from.
797      * @return the image patch converted to RGB as an ARGB color array.
798      */
799     private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
800                                                Image yuvImage) {
801         final int CHANNELS = 3; // yuv
802         final float COLOR_RANGE = 255f;
803 
804         assertTrue("Invalid argument to convertPixelYuvToRgba",
805                 w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
806         assertNotNull(yuvImage);
807 
808         int imageFormat = yuvImage.getFormat();
809         assertTrue("YUV image must have YUV-type format",
810                 imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
811                         imageFormat == ImageFormat.NV21);
812 
813         int height = yuvImage.getHeight();
814         int width = yuvImage.getWidth();
815 
816         Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
817         Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
818                 /*bottom*/hOffset + h);
819         assertTrue("Output rectangle" + crop + " must lie within image bounds " + imageBounds,
820                 imageBounds.contains(crop));
821         Image.Plane[] planes = yuvImage.getPlanes();
822 
823         Image.Plane yPlane = planes[0];
824         Image.Plane cbPlane = planes[1];
825         Image.Plane crPlane = planes[2];
826 
827         ByteBuffer yBuf = yPlane.getBuffer();
828         int yPixStride = yPlane.getPixelStride();
829         int yRowStride = yPlane.getRowStride();
830         ByteBuffer cbBuf = cbPlane.getBuffer();
831         int cbPixStride = cbPlane.getPixelStride();
832         int cbRowStride = cbPlane.getRowStride();
833         ByteBuffer crBuf = crPlane.getBuffer();
834         int crPixStride = crPlane.getPixelStride();
835         int crRowStride = crPlane.getRowStride();
836 
837         int[] output = new int[w * h];
838 
839         // TODO: Optimize this with renderscript intrinsics
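        // Each row buffer holds one patch row: w luma samples (or w / 2 chroma samples) spaced at
        // the plane's pixel stride.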
840         byte[] yRow = new byte[yPixStride * (w - 1) + 1];
841         byte[] cbRow = new byte[cbPixStride * (w / 2 - 1) + 1];
842         byte[] crRow = new byte[crPixStride * (w / 2 - 1) + 1];
843         yBuf.mark();
844         cbBuf.mark();
845         crBuf.mark();
846         int initialYPos = yBuf.position();
847         int initialCbPos = cbBuf.position();
848         int initialCrPos = crBuf.position();
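        // Row reads below are positioned absolutely, relative to these initial buffer offsets.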
849         int outputPos = 0;
850         for (int i = hOffset; i < hOffset + h; i++) {
851             yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
852             yBuf.get(yRow);
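            // The chroma planes are vertically subsampled by 2, so the cb/cr rows are reloaded
            // only on every other luma row.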
853             if ((i & 1) == (hOffset & 1)) {
854                 cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
855                 cbBuf.get(cbRow);
856                 crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
857                 crBuf.get(crRow);
858             }
859             for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
860                 float y = yRow[yPix] & 0xFF;
861                 float cb = cbRow[cbPix] & 0xFF;
862                 float cr = crRow[crPix] & 0xFF;
863 
864                 // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
865                 int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
866                 int g = (int) Math.max(0.0f,
867                         Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
868                 int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));
869 
870                 // Convert to ARGB pixel color (use opaque alpha)
871                 output[outputPos++] = Color.rgb(r, g, b);
872 
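                // Chroma is horizontally subsampled by 2: an even/odd column pair shares one
                // cb/cr sample, so the chroma indices advance only after odd columns.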
873                 if ((j & 1) == 1) {
874                     crPix += crPixStride;
875                     cbPix += cbPixStride;
876                 }
877             }
878         }
879         yBuf.rewind();
880         cbBuf.rewind();
881         crBuf.rewind();
882 
883         return output;
884     }
885 
886     /**
887      * Test capture of a given format stream simultaneously with a YUV stream.
888      *
889      * <p>Uses a fixed YUV size and varies the target format's capture size. Single captures are tested.</p>
890      *
891      * @param format The capture format to be tested along with the YUV format.
892      */
893     private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
894         bufferFormatWithYuvTestByCamera(format, false);
895     }
896 
897     /**
898      * Test capture a given format stream with yuv stream simultaneously.
899      *
900      * <p>Use fixed yuv size, varies targeted format capture size. Single capture is tested.</p>
901      *
902      * @param format The capture format to be tested along with yuv format.
903      * @param setUsageFlag The ImageReader factory method to be used (with or without specifying
904      *                     usage flag)
905      */
906     private void bufferFormatWithYuvTestByCamera(int format, boolean setUsageFlag)
907             throws Exception {
908         if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
909                 && format != ImageFormat.YUV_420_888) {
910             throw new IllegalArgumentException("Unsupported format: " + format);
911         }
912 
913         final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
914         Size maxYuvSz = mOrderedPreviewSizes.get(0);
915         Size[] targetCaptureSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
916                 StaticMetadata.StreamDirection.Output);
917 
918         for (Size captureSz : targetCaptureSizes) {
919             if (VERBOSE) {
920                 Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
921                         + captureSz.toString() + " for camera " + mCamera.getId());
922             }
923 
924             ImageReader captureReader = null;
925             ImageReader yuvReader = null;
926             try {
927                 // Create YUV image reader
928                 SimpleImageReaderListener yuvListener  = new SimpleImageReaderListener();
929                 if (setUsageFlag) {
930                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
931                             HardwareBuffer.USAGE_CPU_READ_OFTEN, yuvListener);
932                 } else {
933                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
934                             yuvListener);
935                 }
936 
937                 Surface yuvSurface = yuvReader.getSurface();
938 
939                 // Create capture image reader
940                 SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
941                 if (setUsageFlag) {
942                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
943                             HardwareBuffer.USAGE_CPU_READ_OFTEN, captureListener);
944                 } else {
945                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
946                             captureListener);
947                 }
948                 Surface captureSurface = captureReader.getSurface();
949 
950                 // Capture images.
951                 List<Surface> outputSurfaces = new ArrayList<Surface>();
952                 outputSurfaces.add(yuvSurface);
953                 outputSurfaces.add(captureSurface);
954                 CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
955                         CameraDevice.TEMPLATE_PREVIEW);
956                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
957 
958                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
959                     startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
960                 }
961 
962                 // Verify capture result and images
963                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
964                     resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
965                     if (VERBOSE) {
966                         Log.v(TAG, " Got the capture result back for " + i + "th capture");
967                     }
968 
969                     Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
970                     if (VERBOSE) {
971                         Log.v(TAG, " Got the yuv image back for " + i + "th capture");
972                     }
973 
974                     Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
975                     if (VERBOSE) {
976                         Log.v(TAG, " Got the capture image back for " + i + "th capture");
977                     }
978 
979                     // Validate captured images.
980                     CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
981                             maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
982                     CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
983                             captureSz.getHeight(), format, /*filePath*/null);
984                     yuvImage.close();
985                     captureImage.close();
986                 }
987 
988                 // Stop capture, delete the streams.
989                 stopCapture(/*fast*/false);
990             } finally {
991                 closeImageReader(captureReader);
992                 captureReader = null;
993                 closeImageReader(yuvReader);
994                 yuvReader = null;
995             }
996         }
997     }
998 
999     private void invalidAccessTestAfterClose() throws Exception {
1000         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1001             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1002 
1003         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1004                 StaticMetadata.StreamDirection.Output);
1005         Image img = null;
1006         // Create ImageReader.
1007         mListener = new SimpleImageListener();
1008         createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);
1009 
1010         // Start capture.
1011         CaptureRequest request = prepareCaptureRequest();
1012         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1013         startCapture(request, /* repeating */false, listener, mHandler);
1014 
1015         mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1016         img = mReader.acquireNextImage();
1017         Plane firstPlane = img.getPlanes()[0];
1018         ByteBuffer buffer = firstPlane.getBuffer();
1019         img.close();
1020 
1021         imageInvalidAccessTestAfterClose(img, firstPlane, buffer);
1022     }
1023 
1024     /**
1025      * Test that images captured after discarding free buffers are valid.
1026      */
1027     private void discardFreeBuffersTestByCamera() throws Exception {
1028         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1029             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1030 
1031         final Size SIZE = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1032                 StaticMetadata.StreamDirection.Output)[0];
1033         // Create ImageReader.
1034         mListener = new SimpleImageListener();
1035         createDefaultImageReader(SIZE, FORMAT, MAX_NUM_IMAGES, mListener);
1036 
1037         // Start capture.
1038         final boolean REPEATING = true;
1039         final boolean SINGLE = false;
1040         CaptureRequest request = prepareCaptureRequest();
1041         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1042         startCapture(request, REPEATING, listener, mHandler);
1043 
1044         // Validate images and capture results.
1045         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING);
1046         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1047 
1048         // Discard free buffers.
1049         mReader.discardFreeBuffers();
1050 
1051         // Validate images and capture results again.
1052         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING);
1053         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1054 
1055         // Stop repeating request in preparation for discardFreeBuffers
1056         mCameraSession.stopRepeating();
1057         mCameraSessionListener.getStateWaiter().waitForState(
1058                 BlockingSessionCallback.SESSION_READY, SESSION_READY_TIMEOUT_MS);
1059 
1060         // Drain the reader queue and discard free buffers from the reader.
1061         Image img = mReader.acquireLatestImage();
1062         if (img != null) {
1063             img.close();
1064         }
1065         mReader.discardFreeBuffers();
1066 
1067         // Do a single capture for camera device to reallocate buffers
1068         mListener.reset();
1069         startCapture(request, SINGLE, listener, mHandler);
1070         validateImage(SIZE, FORMAT, /*captureCount*/1, SINGLE);
1071     }
1072 
1073     private void bufferFormatTestByCamera(int format, boolean repeating) throws Exception {
1074         bufferFormatTestByCamera(format, /*setUsageFlag*/ false,
1075                 HardwareBuffer.USAGE_CPU_READ_OFTEN, repeating,
1076                 /*checkSession*/ false, /*validateImageData*/ true);
1077     }
1078 
1079     private void bufferFormatTestByCamera(int format, boolean repeating, boolean checkSession)
1080             throws Exception {
1081         bufferFormatTestByCamera(format, /*setUsageFlag*/ false,
1082                 HardwareBuffer.USAGE_CPU_READ_OFTEN,
1083                 repeating, checkSession, /*validateImageData*/true);
1084     }
1085 
1086     private void bufferFormatTestByCamera(int format, boolean setUsageFlag, long usageFlag,
1087             boolean repeating, boolean checkSession, boolean validateImageData) throws Exception {
1088         bufferFormatTestByCamera(format, setUsageFlag, usageFlag, repeating, checkSession,
1089                 validateImageData, /*physicalId*/null);
1090     }
1091 
1092     private void bufferFormatTestByCamera(int format, boolean setUsageFlag, long usageFlag,
1093             // TODO: Consider having some sort of test configuration class passed to reduce the
1094             //       proliferation of parameters ?
1095             boolean repeating, boolean checkSession, boolean validateImageData, String physicalId)
1096             throws Exception {
1097         StaticMetadata staticInfo;
1098         if (physicalId == null) {
1099             staticInfo = mStaticInfo;
1100         } else {
1101             staticInfo = mAllStaticInfo.get(physicalId);
1102         }
1103 
1104         Size[] availableSizes = staticInfo.getAvailableSizesForFormatChecked(format,
1105                 StaticMetadata.StreamDirection.Output);
1106 
1107         boolean secureTest = setUsageFlag &&
1108                 ((usageFlag & HardwareBuffer.USAGE_PROTECTED_CONTENT) != 0);
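        // When testing protected content, only the HAL-advertised default secure image size
        // (if any) is exercised; other resolutions are skipped in the loop below.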
1109         Size secureDataSize = null;
1110         if (secureTest) {
1111             secureDataSize = staticInfo.getCharacteristics().get(
1112                     CameraCharacteristics.SCALER_DEFAULT_SECURE_IMAGE_SIZE);
1113         }
1114 
1115         // For each resolution, test the ImageReader:
1116         for (Size sz : availableSizes) {
1117             try {
1118                 // For the secure mode test, only test the default secure data size if the HAL advertises one.
1119                 if (secureDataSize != null && !secureDataSize.equals(sz)) {
1120                     continue;
1121                 }
1122 
1123                 if (VERBOSE) {
1124                     Log.v(TAG, "Testing size " + sz.toString() + " format " + format
1125                             + " for camera " + mCamera.getId());
1126                 }
1127 
1128                 // Create ImageReader.
1129                 mListener  = new SimpleImageListener();
1130                 if (setUsageFlag) {
1131                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, usageFlag, mListener);
1132                 } else {
1133                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1134                 }
1135 
1136                 // Don't queue up images if we won't validate them
1137                 if (!validateImageData) {
1138                     ImageDropperListener imageDropperListener = new ImageDropperListener();
1139                     mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
1140                 }
1141 
1142                 if (checkSession) {
1143                     checkImageReaderSessionConfiguration(
1144                             "Camera capture session validation for format: " + format + "failed",
1145                             physicalId);
1146                 }
1147 
                ArrayList<OutputConfiguration> outputConfigs = new ArrayList<>();
                OutputConfiguration config = new OutputConfiguration(mReader.getSurface());
                if (physicalId != null) {
                    config.setPhysicalCameraId(physicalId);
                }
                outputConfigs.add(config);
                CaptureRequest request = prepareCaptureRequestForConfigs(
                        outputConfigs, CameraDevice.TEMPLATE_PREVIEW).build();

                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(request, repeating, listener, mHandler);

                int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;

                if (validateImageData) {
                    // Validate images.
                    validateImage(sz, format, numFrameVerified, repeating);
                }

                // Validate capture result.
                validateCaptureResult(format, sz, listener, numFrameVerified);

                // Stop capture.
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }

        }
    }

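    /**
     * Check that manual sensitivity and exposure time settings are honored while the consumer
     * holds on to each image for an extended time, simulating a slow downstream processing stage.
     */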
    private void bufferFormatLongProcessingTimeTestByCamera(int format)
            throws Exception {

        final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204);
        final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000);
        final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000;

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        // Test the ImageReader at each available resolution:
        for (Size sz : availableSizes) {
            Log.v(TAG, "testing size " + sz.toString());
            try {
                if (VERBOSE) {
                    Log.v(TAG, "Testing long processing time: size " + sz.toString() + " format " +
                            format + " for camera " + mCamera.getId());
                }

                // Create ImageReader.
                mListener = new SimpleImageListener();
                createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);

                // Set up manual controls.
                List<Surface> outputSurfaces = new ArrayList<Surface>();
                outputSurfaces.add(mReader.getSurface());
                CaptureRequest.Builder requestBuilder = prepareCaptureRequestForSurfaces(
                        outputSurfaces, CameraDevice.TEMPLATE_STILL_CAPTURE);

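                // Lock 3A and switch AE/AWB off so that the fixed sensitivity and exposure time
                // below are applied as requested and can be checked against the capture results.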
                requestBuilder.set(
                        CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
                requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
                requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_OFF);
                requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
                        CaptureRequest.CONTROL_AWB_MODE_OFF);
                requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE);
                requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS);

                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(requestBuilder.build(), /*repeating*/true, listener, mHandler);

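                // For each verified frame: acquire the image, validate its contents, check that
                // the manual exposure/sensitivity were applied, then hold the image for a while
                // before closing it to emulate a slow consumer.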
                for (int i = 0; i < NUM_LONG_PROCESS_TIME_FRAME_VERIFIED; i++) {
                    mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);

                    // Verify image.
                    Image img = mReader.acquireNextImage();
                    assertNotNull("Unable to acquire next image", img);
                    CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                            mDebugFileNameBase);

                    // Verify the exposure time and iso match the requested values.
                    CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

                    long exposureTimeDiff = TEST_EXPOSURE_TIME_NS -
                            getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                    int sensitivityDiff = TEST_SENSITIVITY_VALUE -
                            getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);

                    mCollector.expectTrue(
                            String.format("Long processing frame %d format %d size %s " +
                                    "exposure time was %d expecting %d.", i, format, sz.toString(),
                                    getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME),
                                    TEST_EXPOSURE_TIME_NS),
                            exposureTimeDiff < EXPOSURE_TIME_ERROR_MARGIN_NS &&
                            exposureTimeDiff >= 0);

                    mCollector.expectTrue(
                            String.format("Long processing frame %d format %d size %s " +
                                    "sensitivity was %d expecting %d.", i, format, sz.toString(),
                                    getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY),
                                    TEST_SENSITIVITY_VALUE),
                            sensitivityDiff >= 0);

                    // Sleep to simulate long processing before closing the image.
                    Thread.sleep(LONG_PROCESS_TIME_MS);
                    img.close();
                }
                // Stop capture.
                // Drain the reader queue in case the full queue blocks
                // HAL from delivering new results
                ImageDropperListener imageDropperListener = new ImageDropperListener();
                mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
                Image img = mReader.acquireLatestImage();
                if (img != null) {
                    img.close();
                }
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }
        }
    }

    /**
     * Validate capture results.
     *
     * @param format The format of this capture.
     * @param size The capture size.
     * @param listener The capture listener to get capture result callbacks.
     * @param numFrameVerified The number of capture results to verify.
     */
    private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
            int numFrameVerified) {
        for (int i = 0; i < numFrameVerified; i++) {
            CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

            // TODO: Update this to use availableResultKeys once shim supports this.
            if (mStaticInfo.isCapabilitySupported(
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
                Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
                mCollector.expectInRange(
                        String.format(
                                "Capture for format %d, size %s exposure time is invalid.",
                                format, size.toString()),
                        exposureTime,
                        mStaticInfo.getExposureMinimumOrDefault(),
                        mStaticInfo.getExposureMaximumOrDefault()
                );
                mCollector.expectInRange(
                        String.format("Capture for format %d, size %s sensitivity is invalid.",
                                format, size.toString()),
                        sensitivity,
                        mStaticInfo.getSensitivityMinimumOrDefault(),
                        mStaticInfo.getSensitivityMaximumOrDefault()
                );
            }
            // TODO: add more key validations.
        }
    }

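    /**
     * Image listener that opens a ConditionVariable whenever a new image becomes available on
     * the reader under test, so the test thread can block until the next frame arrives.
     */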
    private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
        private final ConditionVariable imageAvailable = new ConditionVariable();
        @Override
        public void onImageAvailable(ImageReader reader) {
            if (mReader != reader) {
                return;
            }

            if (VERBOSE) Log.v(TAG, "new image available");
            imageAvailable.open();
        }

        public void waitForAnyImageAvailable(long timeout) {
            if (imageAvailable.block(timeout)) {
                imageAvailable.close();
            } else {
                fail("wait for image available timed out after " + timeout + "ms");
            }
        }

        public void closePendingImages() {
            Image image = mReader.acquireLatestImage();
            if (image != null) {
                image.close();
            }
        }

        public void reset() {
            imageAvailable.close();
        }
    }

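    /**
     * Acquire and validate {@code captureCount} images from the reader under test.
     *
     * <p>For repeating captures the latest image is used (dropping stale frames); for a single
     * capture the next image is used.</p>
     */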
    private void validateImage(Size sz, int format, int captureCount, boolean repeating)
            throws Exception {
        // TODO: Add more formats here, and wrap each one as a function.
        Image img;
        final int MAX_RETRY_COUNT = 20;
        int numImageVerified = 0;
        int reTryCount = 0;
        while (numImageVerified < captureCount) {
            assertNotNull("Image listener is null", mListener);
            if (VERBOSE) Log.v(TAG, "Waiting for an Image");
            mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
            if (repeating) {
                /**
                 * Acquire the latest image in case the validation is slower than
                 * the image producing rate.
                 */
                img = mReader.acquireLatestImage();
                /**
                 * When multiple onImageAvailable callbacks are queued up,
                 * acquireLatestImage may clear all buffers before the corresponding
                 * callback is executed. Wait for a new frame in that case.
                 */
                if (img == null && reTryCount < MAX_RETRY_COUNT) {
                    reTryCount++;
                    continue;
                }
            } else {
                img = mReader.acquireNextImage();
            }
            assertNotNull("Unable to acquire the latest image", img);
            if (VERBOSE) Log.v(TAG, "Got the latest image");
            CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                    mDebugFileNameBase);
            HardwareBuffer hwb = img.getHardwareBuffer();
            assertNotNull("Unable to retrieve the Image's HardwareBuffer", hwb);
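            // Dynamic depth (DEPTH_JPEG) images additionally go through the native dynamic depth
            // validator loaded from ctscamera2_jni.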
            if (format == ImageFormat.DEPTH_JPEG) {
                byte[] dynamicDepthBuffer = CameraTestUtils.getDataFromImage(img);
                assertTrue("Dynamic depth validation failed!",
                        validateDynamicDepthNative(dynamicDepthBuffer));
            }
            if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
            img.close();
            numImageVerified++;
            reTryCount = 0;
        }

        // Return all pending images to the ImageReader, as validateImage may
        // take a while to return and there could be many images pending.
        mListener.closePendingImages();
    }

    /** Load the dynamic depth validation JNI library on initialization. */
    static {
        System.loadLibrary("ctscamera2_jni");
    }

    /**
     * Use the dynamic depth SDK to validate a dynamic depth file stored in the buffer.
     *
     * Returns false if the dynamic depth has validation errors. Validation warnings/errors
     * will be printed to logcat.
     */
    private static native boolean validateDynamicDepthNative(byte[] dynamicDepthBuffer);
}