1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.graphics.ImageFormat.YUV_420_888;
20 import static android.hardware.camera2.cts.helpers.Preconditions.*;
21 import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
22 import static android.hardware.camera2.cts.CameraTestUtils.*;
23 import static com.android.ex.camera2.blocking.BlockingStateCallback.*;
24 
25 import android.content.Context;
26 import android.graphics.ImageFormat;
27 import android.graphics.RectF;
28 import android.hardware.camera2.CameraAccessException;
29 import android.hardware.camera2.CameraCaptureSession;
30 import android.hardware.camera2.CameraCharacteristics;
31 import android.hardware.camera2.CameraDevice;
32 import android.hardware.camera2.CameraManager;
33 import android.hardware.camera2.CameraMetadata;
34 import android.hardware.camera2.CaptureRequest;
35 import android.hardware.camera2.CaptureResult;
36 import android.hardware.camera2.TotalCaptureResult;
37 import android.hardware.camera2.params.ColorSpaceTransform;
38 import android.hardware.camera2.params.RggbChannelVector;
39 import android.hardware.camera2.params.StreamConfigurationMap;
40 import android.util.Size;
41 import android.hardware.camera2.cts.helpers.MaybeNull;
42 import android.hardware.camera2.cts.helpers.StaticMetadata;
43 import android.hardware.camera2.cts.rs.RenderScriptSingleton;
44 import android.hardware.camera2.cts.rs.ScriptGraph;
45 import android.hardware.camera2.cts.rs.ScriptYuvCrop;
46 import android.hardware.camera2.cts.rs.ScriptYuvMeans1d;
47 import android.hardware.camera2.cts.rs.ScriptYuvMeans2dTo1d;
48 import android.hardware.camera2.cts.rs.ScriptYuvToRgb;
49 import android.os.Handler;
50 import android.os.HandlerThread;
51 import android.renderscript.Allocation;
52 import android.renderscript.Script.LaunchOptions;
53 import android.test.AndroidTestCase;
54 import android.util.Log;
55 import android.util.Rational;
56 import android.view.Surface;
57 
58 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
59 import com.android.ex.camera2.blocking.BlockingStateCallback;
60 import com.android.ex.camera2.blocking.BlockingSessionCallback;
61 
62 import java.util.ArrayList;
63 import java.util.Arrays;
64 import java.util.List;
65 
66 /**
67  * Suite of tests for camera2 -> RenderScript APIs.
68  *
69  * <p>It uses CameraDevice as producer, camera sends the data to the surface provided by
70  * Allocation. Only the below format is tested:</p>
71  *
72  * <p>YUV_420_888: flexible YUV420, it is a mandatory format for camera.</p>
73  */
74 public class AllocationTest extends AndroidTestCase {
75     private static final String TAG = "AllocationTest";
76     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
77 
78     private CameraManager mCameraManager;
79     private CameraDevice mCamera;
80     private CameraCaptureSession mSession;
81     private BlockingStateCallback mCameraListener;
82     private BlockingSessionCallback mSessionListener;
83 
84     private String[] mCameraIds;
85 
86     private Handler mHandler;
87     private HandlerThread mHandlerThread;
88 
89     private CameraIterable mCameraIterable;
90     private SizeIterable mSizeIterable;
91     private ResultIterable mResultIterable;
92 
93     @Override
setContext(Context context)94     public synchronized void setContext(Context context) {
95         super.setContext(context);
96         mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
97         assertNotNull("Can't connect to camera manager!", mCameraManager);
98 
99         RenderScriptSingleton.setContext(context);
100         // TODO: call clearContext
101     }
102 
103     @Override
setUp()104     protected void setUp() throws Exception {
105         super.setUp();
106         mCameraIds = mCameraManager.getCameraIdList();
107         mHandlerThread = new HandlerThread("AllocationTest");
108         mHandlerThread.start();
109         mHandler = new Handler(mHandlerThread.getLooper());
110         mCameraListener = new BlockingStateCallback();
111 
112         mCameraIterable = new CameraIterable();
113         mSizeIterable = new SizeIterable();
114         mResultIterable = new ResultIterable();
115     }
116 
    @Override
    protected void tearDown() throws Exception {
        // Close the camera if a test left it open (safe no-op when mCamera is null).
        MaybeNull.close(mCamera);

        // TODO: Clean up RenderScript context in a static test run finished method.
        // Or alternatively count the # of test methods that are in this test,
        // once we reach that count, it's time to call the last tear down

        // quitSafely() lets already-queued callbacks drain before the thread exits.
        mHandlerThread.quitSafely();
        mHandler = null;
        super.tearDown();
    }
129 
130     /**
131      * Update the request with a default manual request template.
132      *
133      * @param request A builder for a CaptureRequest
134      * @param sensitivity ISO gain units (e.g. 100)
135      * @param expTimeNs Exposure time in nanoseconds
136      */
setManualCaptureRequest(CaptureRequest.Builder request, int sensitivity, long expTimeNs)137     private static void setManualCaptureRequest(CaptureRequest.Builder request, int sensitivity,
138             long expTimeNs) {
139         final Rational ONE = new Rational(1, 1);
140         final Rational ZERO = new Rational(0, 1);
141 
142         if (VERBOSE) {
143             Log.v(TAG, String.format("Create manual capture request, sensitivity = %d, expTime = %f",
144                     sensitivity, expTimeNs / (1000.0 * 1000)));
145         }
146 
147         request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
148         request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
149         request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
150         request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
151         request.set(CaptureRequest.CONTROL_EFFECT_MODE, CaptureRequest.CONTROL_EFFECT_MODE_OFF);
152         request.set(CaptureRequest.SENSOR_FRAME_DURATION, 0L);
153         request.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
154         request.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTimeNs);
155         request.set(CaptureRequest.COLOR_CORRECTION_MODE,
156                 CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
157 
158         // Identity transform
159         request.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
160             new ColorSpaceTransform(new Rational[] {
161                 ONE, ZERO, ZERO,
162                 ZERO, ONE, ZERO,
163                 ZERO, ZERO, ONE
164             }));
165 
166         // Identity gains
167         request.set(CaptureRequest.COLOR_CORRECTION_GAINS,
168                 new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f ));
169         request.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST);
170     }
171 
172     /**
173      * Calculate the absolute crop window from a {@link Size},
174      * and configure {@link LaunchOptions} for it.
175      */
176     // TODO: split patch crop window and the application against a particular size into 2 classes
177     public static class Patch {
178         /**
179          * Create a new {@link Patch} from relative crop coordinates.
180          *
181          * <p>All float values must be normalized coordinates between [0, 1].</p>
182          *
183          * @param size Size of the original rectangle that is being cropped.
184          * @param xNorm The X coordinate defining the left side of the rectangle (in [0, 1]).
185          * @param yNorm The Y coordinate defining the top side of the rectangle (in [0, 1]).
186          * @param wNorm The width of the crop rectangle (normalized between [0, 1]).
187          * @param hNorm The height of the crop rectangle (normalized between [0, 1]).
188          *
189          * @throws NullPointerException if size was {@code null}.
190          * @throws AssertionError if any of the normalized coordinates were out of range
191          */
Patch(Size size, float xNorm, float yNorm, float wNorm, float hNorm)192         public Patch(Size size, float xNorm, float yNorm, float wNorm, float hNorm) {
193             checkNotNull("size", size);
194 
195             assertInRange(xNorm, 0.0f, 1.0f);
196             assertInRange(yNorm, 0.0f, 1.0f);
197             assertInRange(wNorm, 0.0f, 1.0f);
198             assertInRange(hNorm, 0.0f, 1.0f);
199 
200             wFull = size.getWidth();
201             hFull = size.getWidth();
202 
203             xTile = (int)Math.ceil(xNorm * wFull);
204             yTile = (int)Math.ceil(yNorm * hFull);
205 
206             wTile = (int)Math.ceil(wNorm * wFull);
207             hTile = (int)Math.ceil(hNorm * hFull);
208 
209             mSourceSize = size;
210         }
211 
212         /**
213          * Get the original size used to create this {@link Patch}.
214          *
215          * @return source size
216          */
getSourceSize()217         public Size getSourceSize() {
218             return mSourceSize;
219         }
220 
221         /**
222          * Get the cropped size after applying the normalized crop window.
223          *
224          * @return cropped size
225          */
getSize()226         public Size getSize() {
227             return new Size(wFull, hFull);
228         }
229 
230         /**
231          * Get the {@link LaunchOptions} that can be used with a {@link android.renderscript.Script}
232          * to apply a kernel over a subset of an {@link Allocation}.
233          *
234          * @return launch options
235          */
getLaunchOptions()236         public LaunchOptions getLaunchOptions() {
237             return (new LaunchOptions())
238                     .setX(xTile, xTile + wTile)
239                     .setY(yTile, yTile + hTile);
240         }
241 
242         /**
243          * Get the cropped width after applying the normalized crop window.
244          *
245          * @return cropped width
246          */
getWidth()247         public int getWidth() {
248             return wTile;
249         }
250 
251         /**
252          * Get the cropped height after applying the normalized crop window.
253          *
254          * @return cropped height
255          */
getHeight()256         public int getHeight() {
257             return hTile;
258         }
259 
260         /**
261          * Convert to a {@link RectF} where each corner is represented by a
262          * normalized coordinate in between [0.0, 1.0] inclusive.
263          *
264          * @return a new rectangle
265          */
toRectF()266         public RectF toRectF() {
267             return new RectF(
268                     xTile * 1.0f / wFull,
269                     yTile * 1.0f / hFull,
270                     (xTile + wTile) * 1.0f / wFull,
271                     (yTile + hTile) * 1.0f / hFull);
272         }
273 
274         private final Size mSourceSize;
275         private final int wFull;
276         private final int hFull;
277         private final int xTile;
278         private final int yTile;
279         private final int wTile;
280         private final int hTile;
281     }
282 
283     /**
284      * Convert a single YUV pixel (3 byte elements) to an RGB pixel.
285      *
286      * <p>The color channels must be in the following order:
287      * <ul><li>Y - 0th channel
288      * <li>U - 1st channel
289      * <li>V - 2nd channel
290      * </ul></p>
291      *
292      * <p>Each channel has data in the range 0-255.</p>
293      *
294      * <p>Output data is a 3-element pixel with each channel in the range of [0,1].
295      * Each channel is saturated to avoid over/underflow.</p>
296      *
297      * <p>The conversion is done using JFIF File Interchange Format's "Conversion to and from RGB":
298      * <ul>
299      * <li>R = Y + 1.042 (Cr - 128)
300      * <li>G = Y - 0.34414 (Cb - 128) - 0.71414 (Cr - 128)
301      * <li>B = Y + 1.772 (Cb - 128)
302      * </ul>
303      *
304      * Where Cr and Cb are aliases of V and U respectively.
305      * </p>
306      *
307      * @param yuvData An array of a YUV pixel (at least 3 bytes large)
308      *
309      * @return an RGB888 pixel with each channel in the range of [0,1]
310      */
convertPixelYuvToRgb(byte[] yuvData)311     private static float[] convertPixelYuvToRgb(byte[] yuvData) {
312         final int CHANNELS = 3; // yuv
313         final float COLOR_RANGE = 255f;
314 
315         assertTrue("YUV pixel must be at least 3 bytes large", CHANNELS <= yuvData.length);
316 
317         float[] rgb = new float[CHANNELS];
318 
319         float y = yuvData[0] & 0xFF;  // Y channel
320         float cb = yuvData[1] & 0xFF; // U channel
321         float cr = yuvData[2] & 0xFF; // V channel
322 
323         // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
324         float r = y + 1.402f * (cr - 128);
325         float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
326         float b = y + 1.772f * (cb - 128);
327 
328         // normalize [0,255] -> [0,1]
329         rgb[0] = r / COLOR_RANGE;
330         rgb[1] = g / COLOR_RANGE;
331         rgb[2] = b / COLOR_RANGE;
332 
333         // Clamp to range [0,1]
334         for (int i = 0; i < CHANNELS; ++i) {
335             rgb[i] = Math.max(0.0f, Math.min(1.0f, rgb[i]));
336         }
337 
338         if (VERBOSE) {
339             Log.v(TAG, String.format("RGB calculated (r,g,b) = (%f, %f, %f)", rgb[0], rgb[1],
340                     rgb[2]));
341         }
342 
343         return rgb;
344     }
345 
346     /**
347      * Configure the camera with the target surface;
348      * create a capture request builder with {@code cameraTarget} as the sole surface target.
349      *
350      * <p>Outputs are configured with the new surface targets, and this function blocks until
351      * the camera has finished configuring.</p>
352      *
353      * <p>The capture request is created from the {@link CameraDevice#TEMPLATE_PREVIEW} template.
354      * No other keys are set.
355      * </p>
356      */
configureAndCreateRequestForSurface(Surface cameraTarget)357     private CaptureRequest.Builder configureAndCreateRequestForSurface(Surface cameraTarget)
358             throws CameraAccessException {
359         List<Surface> outputSurfaces = new ArrayList<Surface>(/*capacity*/1);
360         assertNotNull("Failed to get Surface", cameraTarget);
361         outputSurfaces.add(cameraTarget);
362 
363         mSessionListener = new BlockingSessionCallback();
364         mCamera.createCaptureSession(outputSurfaces, mSessionListener, mHandler);
365         mSession = mSessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
366         CaptureRequest.Builder captureBuilder =
367                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
368         assertNotNull("Fail to create captureRequest", captureBuilder);
369         captureBuilder.addTarget(cameraTarget);
370 
371         if (VERBOSE) Log.v(TAG, "configureAndCreateRequestForSurface - done");
372 
373         return captureBuilder;
374     }
375 
376     /**
377      * Submit a single request to the camera, block until the buffer is available.
378      *
379      * <p>Upon return from this function, script has been executed against the latest buffer.
380      * </p>
381      */
captureSingleShotAndExecute(CaptureRequest request, ScriptGraph graph)382     private void captureSingleShotAndExecute(CaptureRequest request, ScriptGraph graph)
383             throws CameraAccessException {
384         checkNotNull("request", request);
385         checkNotNull("graph", graph);
386 
387         mSession.capture(request, new CameraCaptureSession.CaptureCallback() {
388             @Override
389             public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
390                     TotalCaptureResult result) {
391                 if (VERBOSE) Log.v(TAG, "Capture completed");
392             }
393         }, mHandler);
394 
395         if (VERBOSE) Log.v(TAG, "Waiting for single shot buffer");
396         graph.advanceInputWaiting();
397         if (VERBOSE) Log.v(TAG, "Got the buffer");
398         graph.execute();
399     }
400 
    /**
     * Close the current capture session and block until it reports
     * {@code SESSION_CLOSED}, then drop the session references.
     */
    private void stopCapture() throws CameraAccessException {
        if (VERBOSE) Log.v(TAG, "Stopping capture and waiting for idle");
        // Stop repeat, wait for captures to complete, and disconnect from surfaces
        mSession.close();
        mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_CLOSED,
                SESSION_CLOSE_TIMEOUT_MS);
        mSession = null;
        mSessionListener = null;
    }
410 
411     /**
412      * Extremely dumb validator. Makes sure there is at least one non-zero RGB pixel value.
413      */
validateInputOutputNotZeroes(ScriptGraph scriptGraph, Size size)414     private void validateInputOutputNotZeroes(ScriptGraph scriptGraph, Size size) {
415         final int BPP = 8; // bits per pixel
416 
417         int width = size.getWidth();
418         int height = size.getHeight();
419         /**
420          * Check the input allocation is sane.
421          * - Byte size matches what we expect.
422          * - The input is not all zeroes.
423          */
424 
425         // Check that input data was updated first. If it wasn't, the rest of the test will fail.
426         byte[] data = scriptGraph.getInputData();
427         assertArrayNotAllZeroes("Input allocation data was not updated", data);
428 
429         // Minimal required size to represent YUV 4:2:0 image
430         int packedSize =
431                 width * height * ImageFormat.getBitsPerPixel(YUV_420_888) / BPP;
432         if (VERBOSE) Log.v(TAG, "Expected image size = " + packedSize);
433         int actualSize = data.length;
434         // Actual size may be larger due to strides or planes being non-contiguous
435         assertTrue(
436                 String.format(
437                         "YUV 420 packed size (%d) should be at least as large as the actual size " +
438                         "(%d)", packedSize, actualSize), packedSize <= actualSize);
439         /**
440          * Check the output allocation by converting to RGBA.
441          * - Byte size matches what we expect
442          * - The output is not all zeroes
443          */
444         final int RGBA_CHANNELS = 4;
445 
446         int actualSizeOut = scriptGraph.getOutputAllocation().getBytesSize();
447         int packedSizeOut = width * height * RGBA_CHANNELS;
448 
449         byte[] dataOut = scriptGraph.getOutputData();
450         assertEquals("RGB mismatched byte[] and expected size",
451                 packedSizeOut, dataOut.length);
452 
453         if (VERBOSE) {
454             Log.v(TAG, "checkAllocationByConvertingToRgba - RGB data size " + dataOut.length);
455         }
456 
457         assertArrayNotAllZeroes("RGBA data was not updated", dataOut);
458         // RGBA8888 stride should be equal to the width
459         assertEquals("RGBA 8888 mismatched byte[] and expected size", packedSizeOut, actualSizeOut);
460 
461         if (VERBOSE) Log.v(TAG, "validating Buffer , size = " + actualSize);
462     }
463 
    /**
     * For every camera and every supported YUV_420_888 size, stream camera frames
     * into a YUV->RGB script graph and verify the input/output buffers contain
     * real (non-all-zero) data.
     */
    public void testAllocationFromCameraFlexibleYuv() throws Exception {

        /** number of frame (for streaming requests) to be verified. */
        final int NUM_FRAME_VERIFIED = 1;

        mCameraIterable.forEachCamera(new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {

                // Iterate over each size in the camera
                mSizeIterable.forEachSize(YUV_420_888, new SizeBlock() {
                    @Override
                    public void run(final Size size) throws CameraAccessException {
                        // Create a script graph that converts YUV to RGB
                        final ScriptGraph scriptGraph = ScriptGraph.create()
                                .configureInputWithSurface(size, YUV_420_888)
                                .chainScript(ScriptYuvToRgb.class)
                                .buildGraph();

                        if (VERBOSE) Log.v(TAG, "Prepared ScriptYuvToRgb for size " + size);

                        // Run the graph against camera input and validate we get some input
                        try {
                            CaptureRequest request =
                                    configureAndCreateRequestForSurface(scriptGraph.getInputSurface()).build();

                            // Block until we get 1 result, then iterate over the result
                            mResultIterable.forEachResultRepeating(
                                    request, NUM_FRAME_VERIFIED, new ResultBlock() {
                                @Override
                                public void run(CaptureResult result) throws CameraAccessException {
                                    // Process the newest buffer, validate it, then
                                    // drop any extra queued buffers.
                                    scriptGraph.advanceInputWaiting();
                                    scriptGraph.execute();
                                    validateInputOutputNotZeroes(scriptGraph, size);
                                    scriptGraph.advanceInputAndDrop();
                                }
                            });

                            stopCapture();
                        } finally {
                            // Always release the graph's resources, even on failure.
                            scriptGraph.close();
                        }
                    }
                });
            }
        });
    }
511 
    /**
     * Take two shots and ensure per-frame-control with exposure/gain is working correctly.
     *
     * <p>Takes a shot with very low ISO and exposure time. Expect it to be black.</p>
     *
     * <p>Take a shot with very high ISO and exposure time. Expect it to be white.</p>
     *
     * @throws CameraAccessException
     */
    public void testBlackWhite() throws CameraAccessException {

        /** low iso + low exposure (first shot) */
        final float THRESHOLD_LOW = 0.025f;
        /** high iso + high exposure (second shot) */
        final float THRESHOLD_HIGH = 0.975f;

        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {
                final StaticMetadata staticInfo =
                        new StaticMetadata(mCameraManager.getCameraCharacteristics(camera.getId()));

                // This test requires PFC and manual sensor control
                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
                        !staticInfo.isPerFrameControlSupported()) {
                    return;
                }

                final Size maxSize = getMaxSize(
                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));

                // Graph output is the mean YUV value of a centered crop (see helper).
                ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize);

                CaptureRequest.Builder req =
                        configureAndCreateRequestForSurface(scriptGraph.getInputSurface());

                // Take a shot with very low ISO and exposure time. Expect it to be black.
                int minimumSensitivity = staticInfo.getSensitivityMinimumOrDefault();
                long minimumExposure = staticInfo.getExposureMinimumOrDefault();
                setManualCaptureRequest(req, minimumSensitivity, minimumExposure);

                CaptureRequest lowIsoExposureShot = req.build();
                captureSingleShotAndExecute(lowIsoExposureShot, scriptGraph);

                float[] blackMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());

                // Take a shot with very high ISO and exposure time. Expect it to be white.
                int maximumSensitivity = staticInfo.getSensitivityMaximumOrDefault();
                long maximumExposure = staticInfo.getExposureMaximumOrDefault();
                setManualCaptureRequest(req, maximumSensitivity, maximumExposure);

                CaptureRequest highIsoExposureShot = req.build();
                captureSingleShotAndExecute(highIsoExposureShot, scriptGraph);

                float[] whiteMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());

                // low iso + low exposure (first shot)
                assertArrayWithinUpperBound("Black means too high", blackMeans, THRESHOLD_LOW);

                // high iso + high exposure (second shot)
                assertArrayWithinLowerBound("White means too low", whiteMeans, THRESHOLD_HIGH);
            }
        });
    }
577 
    /**
     * Test that the android.sensitivity.parameter is applied.
     *
     * <p>Steps the sensitivity from the device minimum to maximum in NUM_STEPS
     * increments at a fixed exposure time, and checks that the mean brightness of
     * a centered crop increases monotonically and that the min/max shots differ by
     * at least a threshold difference and ratio.</p>
     */
    public void testParamSensitivity() throws CameraAccessException {
        final float THRESHOLD_MAX_MIN_DIFF = 0.3f;
        final float THRESHOLD_MAX_MIN_RATIO = 2.0f;
        final int NUM_STEPS = 5;
        final long EXPOSURE_TIME_NS = 2000000; // 2 ms (was mislabeled "2 seconds")
        final int RGB_CHANNELS = 3;

        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {


            @Override
            public void run(CameraDevice camera) throws CameraAccessException {
                final StaticMetadata staticInfo =
                        new StaticMetadata(mCameraManager.getCameraCharacteristics(camera.getId()));
                // This test requires PFC and manual sensor control
                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
                        !staticInfo.isPerFrameControlSupported()) {
                    return;
                }

                final List<float[]> rgbMeans = new ArrayList<float[]>();
                final Size maxSize = getMaxSize(
                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));

                final int sensitivityMin = staticInfo.getSensitivityMinimumOrDefault();
                final int sensitivityMax = staticInfo.getSensitivityMaximumOrDefault();

                // List each sensitivity from min-max in NUM_STEPS increments
                int[] sensitivities = new int[NUM_STEPS];
                for (int i = 0; i < NUM_STEPS; ++i) {
                    int delta = (sensitivityMax - sensitivityMin) / (NUM_STEPS - 1);
                    sensitivities[i] = sensitivityMin + delta * i;
                }

                ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize);

                CaptureRequest.Builder req =
                        configureAndCreateRequestForSurface(scriptGraph.getInputSurface());

                // Take burst shots with increasing sensitivity one after other.
                for (int i = 0; i < NUM_STEPS; ++i) {
                    setManualCaptureRequest(req, sensitivities[i], EXPOSURE_TIME_NS);
                    captureSingleShotAndExecute(req.build(), scriptGraph);
                    float[] means = convertPixelYuvToRgb(scriptGraph.getOutputData());
                    rgbMeans.add(means);

                    if (VERBOSE) {
                        Log.v(TAG, "testParamSensitivity - captured image " + i +
                                " with RGB means: " + Arrays.toString(means));
                    }
                }

                // Test that every consecutive image gets brighter.
                for (int i = 0; i < rgbMeans.size() - 1; ++i) {
                    float[] curMeans = rgbMeans.get(i);
                    float[] nextMeans = rgbMeans.get(i+1);

                    assertArrayNotGreater(
                            String.format("Shot with sensitivity %d should not have higher " +
                                    "average means than shot with sensitivity %d",
                                    sensitivities[i], sensitivities[i+1]),
                            curMeans, nextMeans);
                }

                // Test the min-max diff and ratios are within expected thresholds
                float[] lastMeans = rgbMeans.get(NUM_STEPS - 1);
                float[] firstMeans = rgbMeans.get(/*location*/0);
                for (int i = 0; i < RGB_CHANNELS; ++i) {
                    assertTrue(
                            String.format("Sensitivity max-min diff too small (max=%f, min=%f)",
                                    lastMeans[i], firstMeans[i]),
                            lastMeans[i] - firstMeans[i] > THRESHOLD_MAX_MIN_DIFF);
                    assertTrue(
                            String.format("Sensitivity max-min ratio too small (max=%f, min=%f)",
                                    lastMeans[i], firstMeans[i]),
                            lastMeans[i] / firstMeans[i] > THRESHOLD_MAX_MIN_RATIO);
                }
            }
        });

    }
663 
664     /**
665      * Common script graph for manual-capture based tests that determine the average pixel
666      * values of a cropped sub-region.
667      *
668      * <p>Processing chain:
669      *
670      * <pre>
671      * input:  YUV_420_888 surface
672      * output: mean YUV value of a central section of the image,
673      *         YUV 4:4:4 encoded as U8_3
674      * steps:
675      *      1) crop [0.45,0.45] - [0.55, 0.55]
676      *      2) average columns
677      *      3) average rows
678      * </pre>
679      * </p>
680      */
createGraphForYuvCroppedMeans(final Size size)681     private static ScriptGraph createGraphForYuvCroppedMeans(final Size size) {
682         ScriptGraph scriptGraph = ScriptGraph.create()
683                 .configureInputWithSurface(size, YUV_420_888)
684                 .configureScript(ScriptYuvCrop.class)
685                     .set(ScriptYuvCrop.CROP_WINDOW,
686                             new Patch(size, /*x*/0.45f, /*y*/0.45f, /*w*/0.1f, /*h*/0.1f).toRectF())
687                     .buildScript()
688                 .chainScript(ScriptYuvMeans2dTo1d.class)
689                 .chainScript(ScriptYuvMeans1d.class)
690                 // TODO: Make a script for YUV 444 -> RGB 888 conversion
691                 .buildGraph();
692         return scriptGraph;
693     }
694 
695     /*
696      * TODO: Refactor below code into separate classes and to not depend on AllocationTest
697      * inner variables.
698      *
699      * TODO: add javadocs to below methods
700      *
701      * TODO: Figure out if there's some elegant way to compose these forEaches together, so that
702      * the callers don't have to do a ton of nesting
703      */
704 
    /**
     * A test operation to run against one opened {@link CameraDevice}.
     *
     * <p>Invoked by {@link CameraIterable#forEachCamera}; the camera handed in is
     * already opened and is closed by the caller after this block returns.</p>
     */
    interface CameraBlock {
        /**
         * Runs the test body against the given opened camera.
         *
         * @param camera an opened camera device; ownership remains with the caller
         * @throws CameraAccessException if a camera2 call inside the block fails
         */
        void run(CameraDevice camera) throws CameraAccessException;
    }
708 
709     class CameraIterable {
forEachCamera(CameraBlock runnable)710         public void forEachCamera(CameraBlock runnable)
711                 throws CameraAccessException {
712             forEachCamera(/*fullHwLevel*/false, runnable);
713         }
714 
forEachCamera(boolean fullHwLevel, CameraBlock runnable)715         public void forEachCamera(boolean fullHwLevel, CameraBlock runnable)
716                 throws CameraAccessException {
717             assertNotNull("No camera manager", mCameraManager);
718             assertNotNull("No camera IDs", mCameraIds);
719 
720             for (int i = 0; i < mCameraIds.length; i++) {
721                 // Don't execute the runnable against non-FULL cameras if FULL is required
722                 CameraCharacteristics properties =
723                         mCameraManager.getCameraCharacteristics(mCameraIds[i]);
724                 StaticMetadata staticInfo = new StaticMetadata(properties);
725                 if (fullHwLevel && !staticInfo.isHardwareLevelFull()) {
726                     Log.i(TAG, String.format(
727                             "Skipping this test for camera %s, needs FULL hw level",
728                             mCameraIds[i]));
729                     continue;
730                 }
731                 if (!staticInfo.isColorOutputSupported()) {
732                     Log.i(TAG, String.format(
733                         "Skipping this test for camera %s, does not support regular outputs",
734                         mCameraIds[i]));
735                     continue;
736                 }
737                 // Open camera and execute test
738                 Log.i(TAG, "Testing Camera " + mCameraIds[i]);
739                 try {
740                     openDevice(mCameraIds[i]);
741 
742                     runnable.run(mCamera);
743                 } finally {
744                     closeDevice(mCameraIds[i]);
745                 }
746             }
747         }
748 
openDevice(String cameraId)749         private void openDevice(String cameraId) {
750             if (mCamera != null) {
751                 throw new IllegalStateException("Already have open camera device");
752             }
753             try {
754                 mCamera = openCamera(
755                     mCameraManager, cameraId, mCameraListener, mHandler);
756             } catch (CameraAccessException e) {
757                 fail("Fail to open camera synchronously, " + Log.getStackTraceString(e));
758             } catch (BlockingOpenException e) {
759                 fail("Fail to open camera asynchronously, " + Log.getStackTraceString(e));
760             }
761             mCameraListener.waitForState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
762         }
763 
closeDevice(String cameraId)764         private void closeDevice(String cameraId) {
765             if (mCamera != null) {
766                 mCamera.close();
767                 mCameraListener.waitForState(STATE_CLOSED, CAMERA_CLOSE_TIMEOUT_MS);
768                 mCamera = null;
769             }
770         }
771     }
772 
    /**
     * A test operation parameterized by a single output {@link Size}.
     *
     * <p>Invoked by {@link SizeIterable#forEachSize} once for each supported output
     * size of a given image format.</p>
     */
    interface SizeBlock {
        /**
         * Runs the test body for one output size.
         *
         * @param size the output size to test
         * @throws CameraAccessException if a camera2 call inside the block fails
         */
        void run(Size size) throws CameraAccessException;
    }
776 
777     class SizeIterable {
forEachSize(int format, SizeBlock runnable)778         public void forEachSize(int format, SizeBlock runnable) throws CameraAccessException {
779             assertNotNull("No camera opened", mCamera);
780             assertNotNull("No camera manager", mCameraManager);
781 
782             CameraCharacteristics properties =
783                     mCameraManager.getCameraCharacteristics(mCamera.getId());
784 
785             assertNotNull("Can't get camera properties!", properties);
786 
787             StreamConfigurationMap config =
788                     properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
789             int[] availableOutputFormats = config.getOutputFormats();
790             assertArrayNotEmpty(availableOutputFormats,
791                     "availableOutputFormats should not be empty");
792             Arrays.sort(availableOutputFormats);
793             assertTrue("Can't find the format " + format + " in supported formats " +
794                     Arrays.toString(availableOutputFormats),
795                     Arrays.binarySearch(availableOutputFormats, format) >= 0);
796 
797             Size[] availableSizes = getSupportedSizeForFormat(format, mCamera.getId(),
798                     mCameraManager);
799             assertArrayNotEmpty(availableSizes, "availableSizes should not be empty");
800 
801             for (Size size : availableSizes) {
802 
803                 if (VERBOSE) {
804                     Log.v(TAG, "Testing size " + size.toString() +
805                             " for camera " + mCamera.getId());
806                 }
807                 runnable.run(size);
808             }
809         }
810     }
811 
    /**
     * A test operation run against one {@link CaptureResult}.
     *
     * <p>Invoked by {@link ResultIterable#forEachResult} once per capture result
     * received from the session.</p>
     */
    interface ResultBlock {
        /**
         * Runs the test body against one capture result.
         *
         * @param result the capture result to inspect
         * @throws CameraAccessException if a camera2 call inside the block fails
         */
        void run(CaptureResult result) throws CameraAccessException;
    }
815 
816     class ResultIterable {
forEachResultOnce(CaptureRequest request, ResultBlock block)817         public void forEachResultOnce(CaptureRequest request, ResultBlock block)
818                 throws CameraAccessException {
819             forEachResult(request, /*count*/1, /*repeating*/false, block);
820         }
821 
forEachResultRepeating(CaptureRequest request, int count, ResultBlock block)822         public void forEachResultRepeating(CaptureRequest request, int count, ResultBlock block)
823                 throws CameraAccessException {
824             forEachResult(request, count, /*repeating*/true, block);
825         }
826 
forEachResult(CaptureRequest request, int count, boolean repeating, ResultBlock block)827         public void forEachResult(CaptureRequest request, int count, boolean repeating,
828                 ResultBlock block) throws CameraAccessException {
829 
830             // TODO: start capture, i.e. configureOutputs
831 
832             SimpleCaptureCallback listener = new SimpleCaptureCallback();
833 
834             if (!repeating) {
835                 for (int i = 0; i < count; ++i) {
836                     mSession.capture(request, listener, mHandler);
837                 }
838             } else {
839                 mSession.setRepeatingRequest(request, listener, mHandler);
840             }
841 
842             // Assume that the device is already IDLE.
843             mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_ACTIVE,
844                     CAMERA_ACTIVE_TIMEOUT_MS);
845 
846             for (int i = 0; i < count; ++i) {
847                 if (VERBOSE) {
848                     Log.v(TAG, String.format("Testing with result %d of %d for camera %s",
849                             i, count, mCamera.getId()));
850                 }
851 
852                 CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
853                 block.run(result);
854             }
855 
856             if (repeating) {
857                 mSession.stopRepeating();
858                 mSessionListener.getStateWaiter().waitForState(
859                     BlockingSessionCallback.SESSION_READY, CAMERA_IDLE_TIMEOUT_MS);
860             }
861 
862             // TODO: Make a Configure decorator or some such for configureOutputs
863         }
864     }
865 }
866