/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.graphics.ImageFormat.YUV_420_888;
import static android.hardware.camera2.cts.helpers.Preconditions.*;
import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
import static android.hardware.camera2.cts.CameraTestUtils.*;
import static com.android.ex.camera2.blocking.BlockingStateCallback.*;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.RectF;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.RggbChannelVector;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.cts.helpers.MaybeNull;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.RenderScriptSingleton;
import android.hardware.camera2.cts.rs.ScriptGraph;
import android.hardware.camera2.cts.rs.ScriptYuvCrop;
import android.hardware.camera2.cts.rs.ScriptYuvMeans1d;
import android.hardware.camera2.cts.rs.ScriptYuvMeans2dTo1d;
import android.hardware.camera2.cts.rs.ScriptYuvToRgb;
import android.os.Handler;
import android.os.HandlerThread;
import android.platform.test.annotations.AppModeFull;
import android.renderscript.Allocation;
import android.renderscript.Script.LaunchOptions;
import android.test.AndroidTestCase;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.blocking.BlockingSessionCallback;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Suite of tests for the camera2 -> RenderScript interop APIs.
 *
 * <p>The CameraDevice acts as the producer; the camera sends image data to the Surface
 * provided by an Allocation. Only the format below is tested:</p>
 *
 * <p>YUV_420_888: flexible YUV 4:2:0, a mandatory output format for camera devices.</p>
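 *
 * <p>(For an Allocation to provide a Surface it is presumably created with
 * {@code Allocation.USAGE_IO_INPUT}; the {@code ScriptGraph} helpers used below are assumed
 * to handle this internally.)</p>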
 */
@AppModeFull
public class AllocationTest extends AndroidTestCase {
    private static final String TAG = "AllocationTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);

    private CameraManager mCameraManager;
    private CameraDevice mCamera;
    private CameraCaptureSession mSession;
    private BlockingStateCallback mCameraListener;
    private BlockingSessionCallback mSessionListener;

    private String[] mCameraIds;

    private Handler mHandler;
    private HandlerThread mHandlerThread;

    private CameraIterable mCameraIterable;
    private SizeIterable mSizeIterable;
    private ResultIterable mResultIterable;

    @Override
    public synchronized void setContext(Context context) {
        super.setContext(context);
        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        assertNotNull("Can't connect to camera manager!", mCameraManager);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mCameraIds = mCameraManager.getCameraIdList();
        mHandlerThread = new HandlerThread("AllocationTest");
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mCameraListener = new BlockingStateCallback();

        mCameraIterable = new CameraIterable();
        mSizeIterable = new SizeIterable();
        mResultIterable = new ResultIterable();

        RenderScriptSingleton.setContext(getContext());
    }

    @Override
    protected void tearDown() throws Exception {
        MaybeNull.close(mCamera);
        RenderScriptSingleton.clearContext();
        mHandlerThread.quitSafely();
        mHandler = null;
        super.tearDown();
    }

    /**
     * Update the request with a default manual request template.
     *
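     * <p>For example, {@code setManualCaptureRequest(builder, 100, 1000000L)} would request a
     * fully manual capture at ISO 100 with a 1 ms exposure (illustrative values; the valid
     * ranges are device-specific).</p>
     *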
     * @param request A builder for a CaptureRequest
     * @param sensitivity ISO gain units (e.g. 100)
     * @param expTimeNs Exposure time in nanoseconds
     */
    private static void setManualCaptureRequest(CaptureRequest.Builder request, int sensitivity,
            long expTimeNs) {
        final Rational ONE = new Rational(1, 1);
        final Rational ZERO = new Rational(0, 1);

        if (VERBOSE) {
            Log.v(TAG, String.format(
                    "Create manual capture request, sensitivity = %d, expTime = %f ms",
                    sensitivity, expTimeNs / (1000.0 * 1000)));
        }

        request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
        request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
        request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
        request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
        request.set(CaptureRequest.CONTROL_EFFECT_MODE, CaptureRequest.CONTROL_EFFECT_MODE_OFF);
        request.set(CaptureRequest.SENSOR_FRAME_DURATION, 0L);
        request.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
        request.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTimeNs);
        request.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);

        // Identity transform
        request.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
            new ColorSpaceTransform(new Rational[] {
                ONE, ZERO, ZERO,
                ZERO, ONE, ZERO,
                ZERO, ZERO, ONE
            }));

        // Identity gains
        request.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f));
        request.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST);
    }

    /**
     * Calculate the absolute crop window from a {@link Size},
     * and configure {@link LaunchOptions} for it.
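     *
     * <p>For example, {@code new Patch(new Size(640, 480), 0.45f, 0.45f, 0.1f, 0.1f)} selects
     * a 64x48 pixel region whose top-left corner is at (288, 216).</p>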
     */
    // TODO: split patch crop window and the application against a particular size into 2 classes
    public static class Patch {
        /**
         * Create a new {@link Patch} from relative crop coordinates.
         *
         * <p>All float values must be normalized coordinates between [0, 1].</p>
         *
         * @param size Size of the original rectangle that is being cropped.
         * @param xNorm The X coordinate defining the left side of the rectangle (in [0, 1]).
         * @param yNorm The Y coordinate defining the top side of the rectangle (in [0, 1]).
         * @param wNorm The width of the crop rectangle (normalized between [0, 1]).
         * @param hNorm The height of the crop rectangle (normalized between [0, 1]).
         *
         * @throws NullPointerException if size was {@code null}.
         * @throws AssertionError if any of the normalized coordinates were out of range
         */
        public Patch(Size size, float xNorm, float yNorm, float wNorm, float hNorm) {
            checkNotNull("size", size);

            assertInRange(xNorm, 0.0f, 1.0f);
            assertInRange(yNorm, 0.0f, 1.0f);
            assertInRange(wNorm, 0.0f, 1.0f);
            assertInRange(hNorm, 0.0f, 1.0f);

            wFull = size.getWidth();
            hFull = size.getHeight();

            xTile = (int)Math.ceil(xNorm * wFull);
            yTile = (int)Math.ceil(yNorm * hFull);

            wTile = (int)Math.ceil(wNorm * wFull);
            hTile = (int)Math.ceil(hNorm * hFull);

            mSourceSize = size;
        }

        /**
         * Get the original size used to create this {@link Patch}.
         *
         * @return source size
         */
        public Size getSourceSize() {
            return mSourceSize;
        }

        /**
         * Get the cropped size after applying the normalized crop window.
         *
         * @return cropped size
         */
        public Size getSize() {
            return new Size(wTile, hTile);
        }

        /**
         * Get the {@link LaunchOptions} that can be used with a {@link android.renderscript.Script}
         * to apply a kernel over a subset of an {@link Allocation}.
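         *
         * <p>For instance, a generated kernel could be restricted to this patch with something
         * like {@code script.forEach_root(in, out, patch.getLaunchOptions())} (illustrative;
         * the exact forEach overload depends on the generated script class).</p>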
         *
         * @return launch options
         */
        public LaunchOptions getLaunchOptions() {
            return (new LaunchOptions())
                    .setX(xTile, xTile + wTile)
                    .setY(yTile, yTile + hTile);
        }

        /**
         * Get the cropped width after applying the normalized crop window.
         *
         * @return cropped width
         */
        public int getWidth() {
            return wTile;
        }

        /**
         * Get the cropped height after applying the normalized crop window.
         *
         * @return cropped height
         */
        public int getHeight() {
            return hTile;
        }

        /**
         * Convert to a {@link RectF} where each corner is represented by a
         * normalized coordinate in between [0.0, 1.0] inclusive.
         *
         * @return a new rectangle
         */
        public RectF toRectF() {
            return new RectF(
                    xTile * 1.0f / wFull,
                    yTile * 1.0f / hFull,
                    (xTile + wTile) * 1.0f / wFull,
                    (yTile + hTile) * 1.0f / hFull);
        }

        private final Size mSourceSize;
        private final int wFull;
        private final int hFull;
        private final int xTile;
        private final int yTile;
        private final int wTile;
        private final int hTile;
    }

    /**
     * Convert a single YUV pixel (3 byte elements) to an RGB pixel.
     *
     * <p>The color channels must be in the following order:
     * <ul><li>Y - 0th channel
     * <li>U - 1st channel
     * <li>V - 2nd channel
     * </ul></p>
     *
     * <p>Each channel has data in the range 0-255.</p>
     *
     * <p>Output data is a 3-element pixel with each channel in the range of [0,1].
     * Each channel is saturated to avoid over/underflow.</p>
     *
     * <p>The conversion is done using JFIF File Interchange Format's "Conversion to and from RGB":
     * <ul>
     * <li>R = Y + 1.402 (Cr - 128)
     * <li>G = Y - 0.34414 (Cb - 128) - 0.71414 (Cr - 128)
     * <li>B = Y + 1.772 (Cb - 128)
     * </ul>
     *
     * Where Cr and Cb are aliases of V and U respectively.
     * </p>
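     *
     * <p>For example, a mid-gray YUV pixel {@code {128, 128, 128}} has zero chroma offsets and
     * maps to approximately (0.502, 0.502, 0.502), i.e. 128/255 per channel.</p>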
     *
     * @param yuvData An array of a YUV pixel (at least 3 bytes large)
     *
     * @return an RGB888 pixel with each channel in the range of [0,1]
     */
    private static float[] convertPixelYuvToRgb(byte[] yuvData) {
        final int CHANNELS = 3; // yuv
        final float COLOR_RANGE = 255f;

        assertTrue("YUV pixel must be at least 3 bytes large", CHANNELS <= yuvData.length);

        float[] rgb = new float[CHANNELS];

        float y = yuvData[0] & 0xFF;  // Y channel
        float cb = yuvData[1] & 0xFF; // U channel
        float cr = yuvData[2] & 0xFF; // V channel

        // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
        float r = y + 1.402f * (cr - 128);
        float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
        float b = y + 1.772f * (cb - 128);

        // normalize [0,255] -> [0,1]
        rgb[0] = r / COLOR_RANGE;
        rgb[1] = g / COLOR_RANGE;
        rgb[2] = b / COLOR_RANGE;

        // Clamp to range [0,1]
        for (int i = 0; i < CHANNELS; ++i) {
            rgb[i] = Math.max(0.0f, Math.min(1.0f, rgb[i]));
        }

        if (VERBOSE) {
            Log.v(TAG, String.format("RGB calculated (r,g,b) = (%f, %f, %f)", rgb[0], rgb[1],
                    rgb[2]));
        }

        return rgb;
    }

    /**
     * Configure the camera with the target surface;
     * create a capture request builder with {@code cameraTarget} as the sole surface target.
     *
     * <p>Outputs are configured with the new surface targets, and this function blocks until
     * the camera has finished configuring.</p>
     *
     * <p>The capture request is created from the {@link CameraDevice#TEMPLATE_PREVIEW} template.
     * No other keys are set.
     * </p>
     */
    private CaptureRequest.Builder configureAndCreateRequestForSurface(Surface cameraTarget)
            throws CameraAccessException {
        List<Surface> outputSurfaces = new ArrayList<Surface>(/*capacity*/1);
        assertNotNull("Failed to get Surface", cameraTarget);
        outputSurfaces.add(cameraTarget);

        mSessionListener = new BlockingSessionCallback();
        mCamera.createCaptureSession(outputSurfaces, mSessionListener, mHandler);
        mSession = mSessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        CaptureRequest.Builder captureBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        assertNotNull("Failed to create captureRequest", captureBuilder);
        captureBuilder.addTarget(cameraTarget);

        if (VERBOSE) Log.v(TAG, "configureAndCreateRequestForSurface - done");

        return captureBuilder;
    }

    /**
     * Submit a single request to the camera, block until the buffer is available.
     *
     * <p>Upon return from this function, the script has been executed against the latest buffer.
     * </p>
     */
    private void captureSingleShotAndExecute(CaptureRequest request, ScriptGraph graph)
            throws CameraAccessException {
        checkNotNull("request", request);
        checkNotNull("graph", graph);

        long exposureTimeNs = -1;
        int controlMode = -1;
        int aeMode = -1;
        if (request.get(CaptureRequest.CONTROL_MODE) != null) {
            controlMode = request.get(CaptureRequest.CONTROL_MODE);
        }
        if (request.get(CaptureRequest.CONTROL_AE_MODE) != null) {
            aeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
        }
        // Only a manual request (3A off, or at least AE off) is guaranteed to honor
        // SENSOR_EXPOSURE_TIME, so only then use it to size the buffer wait below.
        if ((request.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null) &&
                ((controlMode == CaptureRequest.CONTROL_MODE_OFF) ||
                 (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF))) {
            exposureTimeNs = request.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
        }
        mSession.capture(request, new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                if (VERBOSE) Log.v(TAG, "Capture completed");
            }
        }, mHandler);

        if (VERBOSE) Log.v(TAG, "Waiting for single shot buffer");
        if (exposureTimeNs > 0) {
            // Long manual exposures need a correspondingly longer wait for the output buffer
            graph.advanceInputWaiting(
                    java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(exposureTimeNs));
        } else {
            graph.advanceInputWaiting();
        }
        if (VERBOSE) Log.v(TAG, "Got the buffer");
        graph.execute();
    }

    private void stopCapture() throws CameraAccessException {
        if (VERBOSE) Log.v(TAG, "Stopping capture and waiting for idle");
        // Stop repeating, wait for captures to complete, and disconnect from surfaces
        mSession.close();
        mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_CLOSED,
                SESSION_CLOSE_TIMEOUT_MS);
        mSession = null;
        mSessionListener = null;
    }

    /**
     * Extremely dumb validator. Makes sure there is at least one non-zero RGB pixel value.
     */
    private void validateInputOutputNotZeroes(ScriptGraph scriptGraph, Size size) {
        final int BPP = 8; // bits per pixel

        int width = size.getWidth();
        int height = size.getHeight();
        /*
         * Check the input allocation is sane.
         * - Byte size matches what we expect.
         * - The input is not all zeroes.
         */

        // Check that input data was updated first. If it wasn't, the rest of the test will fail.
        byte[] data = scriptGraph.getInputData();
        assertArrayNotAllZeroes("Input allocation data was not updated", data);

        // Minimal required size to represent YUV 4:2:0 image
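        // (YUV_420_888 reports 12 bits per pixel, so this equals width * height * 3 / 2.)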
        int packedSize =
                width * height * ImageFormat.getBitsPerPixel(YUV_420_888) / BPP;
        if (VERBOSE) Log.v(TAG, "Expected image size = " + packedSize);
        int actualSize = data.length;
        // Actual size may be larger due to strides or planes being non-contiguous
        assertTrue(
                String.format(
                        "YUV 420 packed size (%d) should be no larger than the actual size " +
                        "(%d)", packedSize, actualSize), packedSize <= actualSize);
        /*
         * Check the output allocation by converting to RGBA.
         * - Byte size matches what we expect
         * - The output is not all zeroes
         */
        final int RGBA_CHANNELS = 4;

        int actualSizeOut = scriptGraph.getOutputAllocation().getBytesSize();
        int packedSizeOut = width * height * RGBA_CHANNELS;

        byte[] dataOut = scriptGraph.getOutputData();
        assertEquals("RGB mismatched byte[] and expected size",
                packedSizeOut, dataOut.length);

        if (VERBOSE) {
            Log.v(TAG, "checkAllocationByConvertingToRgba - RGB data size " + dataOut.length);
        }

        assertArrayNotAllZeroes("RGBA data was not updated", dataOut);
        // RGBA8888 stride should be equal to the width
        assertEquals("RGBA 8888 mismatched byte[] and expected size", packedSizeOut, actualSizeOut);

        if (VERBOSE) Log.v(TAG, "validating buffer, size = " + actualSize);
    }

    public void testAllocationFromCameraFlexibleYuv() throws Exception {

        /** Number of frames (for streaming requests) to be verified. */
        final int NUM_FRAME_VERIFIED = 1;

        mCameraIterable.forEachCamera(new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {

                // Iterate over each size in the camera
                mSizeIterable.forEachSize(YUV_420_888, new SizeBlock() {
                    @Override
                    public void run(final Size size) throws CameraAccessException {
                        // Create a script graph that converts YUV to RGB
                        try (ScriptGraph scriptGraph = ScriptGraph.create()
                                .configureInputWithSurface(size, YUV_420_888)
                                .chainScript(ScriptYuvToRgb.class)
                                .buildGraph()) {

                            if (VERBOSE) Log.v(TAG, "Prepared ScriptYuvToRgb for size " + size);

                            // Run the graph against camera input and validate we get some input
                            CaptureRequest request = configureAndCreateRequestForSurface(
                                    scriptGraph.getInputSurface()).build();

                            // Block until we get 1 result, then iterate over the result
                            mResultIterable.forEachResultRepeating(
                                    request, NUM_FRAME_VERIFIED, new ResultBlock() {
                                @Override
                                public void run(CaptureResult result) throws CameraAccessException {
                                    scriptGraph.advanceInputWaiting();
                                    scriptGraph.execute();
                                    validateInputOutputNotZeroes(scriptGraph, size);
                                    scriptGraph.advanceInputAndDrop();
                                }
                            });

                            stopCapture();
                            if (VERBOSE) Log.v(TAG, "Cleanup RenderScript cache");
                            scriptGraph.close();
                            RenderScriptSingleton.clearContext();
                            RenderScriptSingleton.setContext(getContext());
                        }
                    }
                });
            }
        });
    }

    /**
     * Take two shots and ensure per-frame-control with exposure/gain is working correctly.
     *
     * <p>Takes a shot with very low ISO and exposure time. Expects it to be black.</p>
     *
     * <p>Takes a shot with very high ISO and exposure time. Expects it to be white.</p>
     *
     * @throws CameraAccessException
     */
    public void testBlackWhite() throws CameraAccessException {

        /** Low ISO + low exposure (first shot). */
        final float THRESHOLD_LOW = 0.025f;
        /** High ISO + high exposure (second shot). */
        final float THRESHOLD_HIGH = 0.975f;

        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {
                final StaticMetadata staticInfo =
                        new StaticMetadata(mCameraManager.getCameraCharacteristics(camera.getId()));

                // This test requires PFC and manual sensor control
                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
                        !staticInfo.isPerFrameControlSupported()) {
                    return;
                }

                final Size maxSize = getMaxSize(
                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));

                try (ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize)) {

                    CaptureRequest.Builder req =
                            configureAndCreateRequestForSurface(scriptGraph.getInputSurface());

                    // Take a shot with very low ISO and exposure time. Expect it to be black.
                    int minimumSensitivity = staticInfo.getSensitivityMinimumOrDefault();
                    long minimumExposure = staticInfo.getExposureMinimumOrDefault();
                    setManualCaptureRequest(req, minimumSensitivity, minimumExposure);

                    CaptureRequest lowIsoExposureShot = req.build();
                    captureSingleShotAndExecute(lowIsoExposureShot, scriptGraph);

                    float[] blackMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());

                    // Take a shot with very high ISO and exposure time. Expect it to be white.
                    int maximumSensitivity = staticInfo.getSensitivityMaximumOrDefault();
                    long maximumExposure = staticInfo.getExposureMaximumOrDefault();
                    setManualCaptureRequest(req, maximumSensitivity, maximumExposure);

                    CaptureRequest highIsoExposureShot = req.build();
                    captureSingleShotAndExecute(highIsoExposureShot, scriptGraph);

                    float[] whiteMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());

                    // Low ISO + low exposure (first shot): just check and log any error.
                    for (int i = 0; i < blackMeans.length; ++i) {
                        if (blackMeans[i] >= THRESHOLD_LOW) {
                            Log.e(TAG,
                                    String.format("Black means too high: (%s should be less"
                                            + " than %s; item index %d in %s)", blackMeans[i],
                                            THRESHOLD_LOW, i,
                                            Arrays.toString(blackMeans)));
                        }
                    }

                    // High ISO + high exposure (second shot): just check and log any error.
                    for (int i = 0; i < whiteMeans.length; ++i) {
                        if (whiteMeans[i] <= THRESHOLD_HIGH) {
                            Log.e(TAG,
                                    String.format("White means too low: (%s should be greater than"
                                            + " %s; item index %d in %s)", whiteMeans[i],
                                            THRESHOLD_HIGH, i,
                                            Arrays.toString(whiteMeans)));
                        }
                    }
                }
            }
        });
    }

    /**
     * Test that the android.sensor.sensitivity parameter is applied.
     */
    public void testParamSensitivity() throws CameraAccessException {
        final float THRESHOLD_MAX_MIN_DIFF = 0.3f;
        final float THRESHOLD_MAX_MIN_RATIO = 2.0f;
        final int NUM_STEPS = 5;
        final long EXPOSURE_TIME_NS = 2000000; // 2 ms
        final int RGB_CHANNELS = 3;

        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {
                final StaticMetadata staticInfo =
                        new StaticMetadata(mCameraManager.getCameraCharacteristics(camera.getId()));
                // This test requires PFC and manual sensor control
                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
                        !staticInfo.isPerFrameControlSupported()) {
                    return;
                }

                final List<float[]> rgbMeans = new ArrayList<float[]>();
                final Size maxSize = getMaxSize(
                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));

                final int sensitivityMin = staticInfo.getSensitivityMinimumOrDefault();
                final int sensitivityMax = staticInfo.getSensitivityMaximumOrDefault();

                // List each sensitivity from min to max in NUM_STEPS increments
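                // (e.g. min=100 and max=800 would yield {100, 275, 450, 625, 800})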
                int[] sensitivities = new int[NUM_STEPS];
                int delta = (sensitivityMax - sensitivityMin) / (NUM_STEPS - 1);
                for (int i = 0; i < NUM_STEPS; ++i) {
                    sensitivities[i] = sensitivityMin + delta * i;
                }

                try (ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize)) {

                    CaptureRequest.Builder req =
                            configureAndCreateRequestForSurface(scriptGraph.getInputSurface());

                    // Take burst shots with increasing sensitivity one after the other.
                    for (int i = 0; i < NUM_STEPS; ++i) {
                        setManualCaptureRequest(req, sensitivities[i], EXPOSURE_TIME_NS);
                        captureSingleShotAndExecute(req.build(), scriptGraph);
                        float[] means = convertPixelYuvToRgb(scriptGraph.getOutputData());
                        rgbMeans.add(means);

                        if (VERBOSE) {
                            Log.v(TAG, "testParamSensitivity - captured image " + i +
                                    " with RGB means: " + Arrays.toString(means));
                        }
                    }

                    // Test that every consecutive image gets brighter.
                    for (int i = 0; i < rgbMeans.size() - 1; ++i) {
                        float[] curMeans = rgbMeans.get(i);
                        float[] nextMeans = rgbMeans.get(i + 1);
                        String curString = Arrays.toString(curMeans);
                        String nextString = Arrays.toString(nextMeans);

                        String msgHeader =
                                String.format("Shot with sensitivity %d should not have higher " +
                                "average means than shot with sensitivity %d",
                                sensitivities[i], sensitivities[i + 1]);
                        for (int m = 0; m < curMeans.length; ++m) {
                            String msg = String.format(
                                    "%s: (%s should be less than or equal to %s; item index %d;"
                                    + " cur = %s; next = %s)",
                                    msgHeader, curMeans[m], nextMeans[m], m, curString, nextString);
                            if (curMeans[m] > nextMeans[m]) {
                                Log.e(TAG, msg);
                            }
                        }
                    }

                    // Test the min-max diff and ratios are within expected thresholds
                    float[] lastMeans = rgbMeans.get(NUM_STEPS - 1);
                    float[] firstMeans = rgbMeans.get(/*location*/0);
                    for (int i = 0; i < RGB_CHANNELS; ++i) {
                        if (lastMeans[i] - firstMeans[i] <= THRESHOLD_MAX_MIN_DIFF) {
                            Log.w(TAG, String.format("Sensitivity max-min diff too small "
                                    + "(max=%f, min=%f)", lastMeans[i], firstMeans[i]));
                        }
                        if (lastMeans[i] / firstMeans[i] <= THRESHOLD_MAX_MIN_RATIO) {
                            Log.w(TAG, String.format("Sensitivity max-min ratio too small "
                                    + "(max=%f, min=%f)", lastMeans[i], firstMeans[i]));
                        }
                    }
                }
            }
        });
    }

    /**
     * Common script graph for manual-capture based tests that determine the average pixel
     * values of a cropped sub-region.
     *
     * <p>Processing chain:
     *
     * <pre>
     * input:  YUV_420_888 surface
     * output: mean YUV value of a central section of the image,
     *         YUV 4:4:4 encoded as U8_3
     * steps:
     *      1) crop [0.45, 0.45] - [0.55, 0.55]
     *      2) average columns
     *      3) average rows
     * </pre>
     * </p>
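     *
     * <p>Callers typically feed the resulting 3-byte output buffer directly into
     * {@link #convertPixelYuvToRgb} to obtain normalized RGB means.</p>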
     */
    private static ScriptGraph createGraphForYuvCroppedMeans(final Size size) {
        ScriptGraph scriptGraph = ScriptGraph.create()
                .configureInputWithSurface(size, YUV_420_888)
                .configureScript(ScriptYuvCrop.class)
                    .set(ScriptYuvCrop.CROP_WINDOW,
                            new Patch(size, /*x*/0.45f, /*y*/0.45f, /*w*/0.1f, /*h*/0.1f).toRectF())
                    .buildScript()
                .chainScript(ScriptYuvMeans2dTo1d.class)
                .chainScript(ScriptYuvMeans1d.class)
                // TODO: Make a script for YUV 444 -> RGB 888 conversion
                .buildGraph();
        return scriptGraph;
    }

    /*
     * TODO: Refactor below code into separate classes and to not depend on AllocationTest
     * inner variables.
     *
     * TODO: add javadocs to below methods
     *
     * TODO: Figure out if there's some elegant way to compose these forEaches together, so that
     * the callers don't have to do a ton of nesting
     */

    interface CameraBlock {
        void run(CameraDevice camera) throws CameraAccessException;
    }

    class CameraIterable {
        public void forEachCamera(CameraBlock runnable)
                throws CameraAccessException {
            forEachCamera(/*fullHwLevel*/false, runnable);
        }

        public void forEachCamera(boolean fullHwLevel, CameraBlock runnable)
                throws CameraAccessException {
            assertNotNull("No camera manager", mCameraManager);
            assertNotNull("No camera IDs", mCameraIds);

            for (int i = 0; i < mCameraIds.length; i++) {
                // Don't execute the runnable against non-FULL cameras if FULL is required
                CameraCharacteristics properties =
                        mCameraManager.getCameraCharacteristics(mCameraIds[i]);
                StaticMetadata staticInfo = new StaticMetadata(properties);
                if (fullHwLevel && !staticInfo.isHardwareLevelAtLeastFull()) {
                    Log.i(TAG, String.format(
                            "Skipping this test for camera %s, needs FULL hw level",
                            mCameraIds[i]));
                    continue;
                }
                if (!staticInfo.isColorOutputSupported()) {
                    Log.i(TAG, String.format(
                        "Skipping this test for camera %s, does not support regular outputs",
                        mCameraIds[i]));
                    continue;
                }
                // Open camera and execute test
                Log.i(TAG, "Testing Camera " + mCameraIds[i]);
                try {
                    openDevice(mCameraIds[i]);

                    runnable.run(mCamera);
                } finally {
                    closeDevice(mCameraIds[i]);
                }
            }
        }

        private void openDevice(String cameraId) {
            if (mCamera != null) {
                throw new IllegalStateException("Already have open camera device");
            }
            try {
                mCamera = openCamera(
                    mCameraManager, cameraId, mCameraListener, mHandler);
            } catch (CameraAccessException e) {
                fail("Failed to open camera synchronously, " + Log.getStackTraceString(e));
            } catch (BlockingOpenException e) {
                fail("Failed to open camera asynchronously, " + Log.getStackTraceString(e));
            }
            mCameraListener.waitForState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
        }

        private void closeDevice(String cameraId) {
            if (mCamera != null) {
                mCamera.close();
                mCameraListener.waitForState(STATE_CLOSED, CAMERA_CLOSE_TIMEOUT_MS);
                mCamera = null;
            }
        }
    }

    interface SizeBlock {
        void run(Size size) throws CameraAccessException;
    }

    class SizeIterable {
        public void forEachSize(int format, SizeBlock runnable) throws CameraAccessException {
            assertNotNull("No camera opened", mCamera);
            assertNotNull("No camera manager", mCameraManager);

            CameraCharacteristics properties =
                    mCameraManager.getCameraCharacteristics(mCamera.getId());

            assertNotNull("Can't get camera properties!", properties);

            StreamConfigurationMap config =
                    properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            int[] availableOutputFormats = config.getOutputFormats();
            assertArrayNotEmpty(availableOutputFormats,
                    "availableOutputFormats should not be empty");
            Arrays.sort(availableOutputFormats);
            assertTrue("Can't find the format " + format + " in supported formats " +
                    Arrays.toString(availableOutputFormats),
                    Arrays.binarySearch(availableOutputFormats, format) >= 0);

            Size[] availableSizes = getSupportedSizeForFormat(format, mCamera.getId(),
                    mCameraManager);
            assertArrayNotEmpty(availableSizes, "availableSizes should not be empty");

            for (Size size : availableSizes) {
                if (VERBOSE) {
                    Log.v(TAG, "Testing size " + size.toString() +
                            " for camera " + mCamera.getId());
                }
                runnable.run(size);
            }
        }
    }

    interface ResultBlock {
        void run(CaptureResult result) throws CameraAccessException;
    }

    class ResultIterable {
        public void forEachResultOnce(CaptureRequest request, ResultBlock block)
                throws CameraAccessException {
            forEachResult(request, /*count*/1, /*repeating*/false, block);
        }

        public void forEachResultRepeating(CaptureRequest request, int count, ResultBlock block)
                throws CameraAccessException {
            forEachResult(request, count, /*repeating*/true, block);
        }

        public void forEachResult(CaptureRequest request, int count, boolean repeating,
                ResultBlock block) throws CameraAccessException {

            // TODO: start capture, i.e. configureOutputs

            SimpleCaptureCallback listener = new SimpleCaptureCallback();

            if (!repeating) {
                for (int i = 0; i < count; ++i) {
                    mSession.capture(request, listener, mHandler);
                }
            } else {
                mSession.setRepeatingRequest(request, listener, mHandler);
            }

            // Assume that the device is already IDLE.
            mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_ACTIVE,
                    CAMERA_ACTIVE_TIMEOUT_MS);

            for (int i = 0; i < count; ++i) {
                if (VERBOSE) {
                    Log.v(TAG, String.format("Testing with result %d of %d for camera %s",
                            i, count, mCamera.getId()));
                }

                CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
                block.run(result);
            }

            if (repeating) {
                mSession.stopRepeating();
                mSessionListener.getStateWaiter().waitForState(
                    BlockingSessionCallback.SESSION_READY, CAMERA_IDLE_TIMEOUT_MS);
            }

            // TODO: Make a Configure decorator or some such for configureOutputs
        }
    }
}