1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.*;
20 import static android.hardware.camera2.CameraCharacteristics.*;
21 
22 import android.graphics.Point;
23 import android.graphics.PointF;
24 import android.graphics.Rect;
25 import android.graphics.SurfaceTexture;
26 import android.hardware.camera2.CameraCharacteristics;
27 import android.hardware.camera2.CameraDevice;
28 import android.hardware.camera2.CameraMetadata;
29 import android.hardware.camera2.CaptureRequest;
30 import android.hardware.camera2.CaptureResult;
31 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
32 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
33 import android.hardware.camera2.params.BlackLevelPattern;
34 import android.hardware.camera2.params.ColorSpaceTransform;
35 import android.hardware.camera2.params.Face;
36 import android.hardware.camera2.params.LensShadingMap;
37 import android.hardware.camera2.params.MeteringRectangle;
38 import android.hardware.camera2.params.RggbChannelVector;
39 import android.hardware.camera2.params.TonemapCurve;
40 import android.media.Image;
41 import android.os.Parcel;
42 import android.platform.test.annotations.AppModeFull;
43 import android.util.ArraySet;
44 import android.util.Log;
45 import android.util.Range;
46 import android.util.Rational;
47 import android.util.Size;
48 import android.view.Surface;
49 
50 import java.nio.ByteBuffer;
51 import java.util.ArrayList;
52 import java.util.Arrays;
53 import java.util.List;
54 
55 /**
56  * <p>
57  * Basic test for camera CaptureRequest key controls.
58  * </p>
59  * <p>
60  * Several test categories are covered: manual sensor control, 3A control,
61  * manual ISP control and other per-frame control and synchronization.
62  * </p>
63  */
64 @AppModeFull
65 public class CaptureRequestTest extends Camera2SurfaceViewTestCase {
66     private static final String TAG = "CaptureRequestTest";
67     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
68     private static final int NUM_FRAMES_VERIFIED = 15;
69     private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60;
70     /** 30ms exposure time must be supported by full capability devices. */
71     private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms
72     private static final int DEFAULT_SENSITIVITY = 100;
73     private static final int RGGB_COLOR_CHANNEL_COUNT = 4;
74     private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT;
75     private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT;
76     private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L;
77     private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms
78     private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation.
79     private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
80     private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
81     private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
82     private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
83     private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8;
84     private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
85     private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
86     private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
87     private static final int NUM_TEST_FOCUS_DISTANCES = 10;
88     private static final int NUM_FOCUS_DISTANCES_REPEAT = 3;
89     // 5 percent error margin for calibrated device
90     private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f;
91     // 25 percent error margin for uncalibrated device
92     private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f;
93     // 10 percent error margin for approximate device
94     private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f;
95     private static final int ANTI_FLICKERING_50HZ = 1;
96     private static final int ANTI_FLICKERING_60HZ = 2;
97     // 5 percent error margin for resulting crop regions
98     private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f;
99     // 1 percent error margin for centering the crop region
100     private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f;
101     private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f;
102     private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f;
103 
104     // Linear tone mapping curve example.
105     private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f};
106     // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points.
107     private static final float[] TONEMAP_CURVE_SRGB = {
108             0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
109             0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
110             0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
111             0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
112     };
113     private final Rational ZERO_R = new Rational(0, 1);
114     private final Rational ONE_R = new Rational(1, 1);
115 
116     private final int NUM_ALGORITHMS = 3; // AE, AWB and AF
117     private final int INDEX_ALGORITHM_AE = 0;
118     private final int INDEX_ALGORITHM_AWB = 1;
119     private final int INDEX_ALGORITHM_AF = 2;
120 
121     private enum TorchSeqState {
122         RAMPING_UP,
123         FIRED,
124         RAMPING_DOWN
125     }
126 
127     @Override
setUp()128     protected void setUp() throws Exception {
129         super.setUp();
130     }
131 
132     @Override
tearDown()133     protected void tearDown() throws Exception {
134         super.tearDown();
135     }
136 
137     /**
138      * Test CaptureRequest settings parcelling.
139      */
testSettingsBinderParcel()140     public void testSettingsBinderParcel() throws Exception {
141         SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5);
142         Surface surface = new Surface(outputTexture);
143 
144         for (int i = 0; i < mCameraIds.length; i++) {
145             try {
146                 openDevice(mCameraIds[i]);
147                 CaptureRequest.Builder requestBuilder =
148                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
149                 requestBuilder.addTarget(surface);
150 
151                 // Check regular/default case
152                 CaptureRequest captureRequestOriginal = requestBuilder.build();
153                 Parcel p;
154                 p = Parcel.obtain();
155                 captureRequestOriginal.writeToParcel(p, 0);
156                 p.setDataPosition(0);
157                 CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
158                 assertEquals("Parcelled camera settings should match",
159                         captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
160                         new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
161                 p.recycle();
162 
163                 // Check capture request with additional physical camera settings
164                 String physicalId = new String(Integer.toString(i + 1));
165                 ArraySet<String> physicalIds = new ArraySet<String> ();
166                 physicalIds.add(physicalId);
167 
168                 requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW,
169                         physicalIds);
170                 requestBuilder.addTarget(surface);
171                 captureRequestOriginal = requestBuilder.build();
172                 p = Parcel.obtain();
173                 captureRequestOriginal.writeToParcel(p, 0);
174                 p.setDataPosition(0);
175                 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
176                 assertEquals("Parcelled camera settings should match",
177                         captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
178                         new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
179                 p.recycle();
180 
181                 // Check various invalid cases
182                 p = Parcel.obtain();
183                 p.writeInt(-1);
184                 p.setDataPosition(0);
185                 try {
186                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
187                     fail("should get RuntimeException due to invalid number of settings");
188                 } catch (RuntimeException e) {
189                     // Expected
190                 }
191                 p.recycle();
192 
193                 p = Parcel.obtain();
194                 p.writeInt(0);
195                 p.setDataPosition(0);
196                 try {
197                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
198                     fail("should get RuntimeException due to invalid number of settings");
199                 } catch (RuntimeException e) {
200                     // Expected
201                 }
202                 p.recycle();
203 
204                 p = Parcel.obtain();
205                 p.writeInt(1);
206                 p.setDataPosition(0);
207                 try {
208                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
209                     fail("should get RuntimeException due to absent settings");
210                 } catch (RuntimeException e) {
211                     // Expected
212                 }
213                 p.recycle();
214             } finally {
215                 closeDevice();
216             }
217         }
218     }
219 
220     /**
221      * Test black level lock when exposure value change.
222      * <p>
223      * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
224      * camera device should lock the black level. When the exposure values are changed,
225      * the camera may require reset black level Since changes to certain capture
226      * parameters (such as exposure time) may require resetting of black level
227      * compensation. However, the black level must remain locked after exposure
228      * value changes (when requests have lock ON).
229      * </p>
230      */
testBlackLevelLock()231     public void testBlackLevelLock() throws Exception {
232         for (int i = 0; i < mCameraIds.length; i++) {
233             try {
234                 openDevice(mCameraIds[i]);
235 
236                 if (!mStaticInfo.isCapabilitySupported(
237                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
238                     continue;
239                 }
240 
241                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
242                 CaptureRequest.Builder requestBuilder =
243                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
244 
245                 // Start with default manual exposure time, with black level being locked.
246                 requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
247                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
248 
249                 Size previewSz =
250                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
251                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
252 
253                 startPreview(requestBuilder, previewSz, listener);
254                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
255                 // No lock OFF state is allowed as the exposure is not changed.
256                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);
257 
258                 // Double the exposure time and gain, with black level still being locked.
259                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
260                 listener = new SimpleCaptureCallback();
261                 startPreview(requestBuilder, previewSz, listener);
262                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
263                 // Allow at most one lock OFF state as the exposure is changed once.
264                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);
265 
266                 stopPreview();
267             } finally {
268                 closeDevice();
269             }
270         }
271     }
272 
273     /**
274      * Test dynamic black/white levels if they are supported.
275      *
276      * <p>
277      * If the dynamic black and white levels are reported, test below:
278      *   1. the dynamic black and white levels shouldn't deviate from the global value too much
279      *   for different sensitivities.
280      *   2. If the RAW_SENSOR and optical black regions are supported, capture RAW images and
281      *   calculate the optical black level values. The reported dynamic black level should be
282      *   close enough to the optical black level values.
283      * </p>
284      */
testDynamicBlackWhiteLevel()285     public void testDynamicBlackWhiteLevel() throws Exception {
286         for (String id : mCameraIds) {
287             try {
288                 openDevice(id);
289                 if (!mStaticInfo.isDynamicBlackLevelSupported()) {
290                     continue;
291                 }
292                 dynamicBlackWhiteLevelTestByCamera();
293             } finally {
294                 closeDevice();
295             }
296         }
297     }
298 
299     /**
300      * Basic lens shading map request test.
301      * <p>
302      * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
303      * be applied by the camera device, and an identity lens shading map data
304      * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
305      * </p>
306      * <p>
307      * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
308      * will be applied by the camera device. The lens shading map data can be
309      * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
310      * </p>
311      */
testLensShadingMap()312     public void testLensShadingMap() throws Exception {
313         for (int i = 0; i < mCameraIds.length; i++) {
314             try {
315                 openDevice(mCameraIds[i]);
316 
317                 if (!mStaticInfo.isManualLensShadingMapSupported()) {
318                     Log.i(TAG, "Camera " + mCameraIds[i] +
319                             " doesn't support lens shading controls, skipping test");
320                     continue;
321                 }
322 
323                 List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
324                         mStaticInfo.getAvailableLensShadingMapModesChecked()));
325 
326                 if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
327                     continue;
328                 }
329 
330                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
331                 CaptureRequest.Builder requestBuilder =
332                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
333                 requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
334                         STATISTICS_LENS_SHADING_MAP_MODE_ON);
335 
336                 Size previewSz =
337                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
338                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
339                 List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
340                         mStaticInfo.getAvailableLensShadingModesChecked()));
341 
342                 // Shading map mode OFF, lensShadingMapMode ON, camera device
343                 // should output unity maps.
344                 if (lensShadingModes.contains(SHADING_MODE_OFF)) {
345                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
346                     listener = new SimpleCaptureCallback();
347                     startPreview(requestBuilder, previewSz, listener);
348                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
349                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
350                 }
351 
352                 // Shading map mode FAST, lensShadingMapMode ON, camera device
353                 // should output valid maps.
354                 if (lensShadingModes.contains(SHADING_MODE_FAST)) {
355                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);
356 
357                     listener = new SimpleCaptureCallback();
358                     startPreview(requestBuilder, previewSz, listener);
359                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
360                     // Allow at most one lock OFF state as the exposure is changed once.
361                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
362                 }
363 
364                 // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
365                 // should output valid maps.
366                 if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
367                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);
368 
369                     listener = new SimpleCaptureCallback();
370                     startPreview(requestBuilder, previewSz, listener);
371                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
372                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
373                 }
374 
375                 stopPreview();
376             } finally {
377                 closeDevice();
378             }
379         }
380     }
381 
382     /**
383      * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
384      * <p>
385      * Test all available anti-banding modes, check if the exposure time adjustment is
386      * correct.
387      * </p>
388      */
testAntiBandingModes()389     public void testAntiBandingModes() throws Exception {
390         for (int i = 0; i < mCameraIds.length; i++) {
391             try {
392                 openDevice(mCameraIds[i]);
393 
394                 // Without manual sensor control, exposure time cannot be verified
395                 if (!mStaticInfo.isCapabilitySupported(
396                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
397                     continue;
398                 }
399 
400                 int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
401 
402                 Size previewSz =
403                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
404                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
405 
406                 for (int mode : modes) {
407                     antiBandingTestByMode(previewSz, mode);
408                 }
409             } finally {
410                 closeDevice();
411             }
412         }
413 
414     }
415 
416     /**
417      * Test AE mode and lock.
418      *
419      * <p>
420      * For AE lock, when it is locked, exposure parameters shouldn't be changed.
421      * For AE modes, each mode should satisfy the per frame controls defined in
422      * API specifications.
423      * </p>
424      */
testAeModeAndLock()425     public void testAeModeAndLock() throws Exception {
426         for (int i = 0; i < mCameraIds.length; i++) {
427             try {
428                 openDevice(mCameraIds[i]);
429                 if (!mStaticInfo.isColorOutputSupported()) {
430                     Log.i(TAG, "Camera " + mCameraIds[i] +
431                             " does not support color outputs, skipping");
432                     continue;
433                 }
434 
435                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
436 
437                 // Update preview surface with given size for all sub-tests.
438                 updatePreviewSurface(maxPreviewSz);
439 
440                 // Test aeMode and lock
441                 int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
442                 for (int mode : aeModes) {
443                     aeModeAndLockTestByMode(mode);
444                 }
445             } finally {
446                 closeDevice();
447             }
448         }
449     }
450 
451     /** Test {@link CaptureRequest#FLASH_MODE} control.
452      * <p>
453      * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control
454      * and {@link CaptureResult#FLASH_STATE} result.
455      * </p>
456      */
testFlashControl()457     public void testFlashControl() throws Exception {
458         for (int i = 0; i < mCameraIds.length; i++) {
459             try {
460                 openDevice(mCameraIds[i]);
461                 if (!mStaticInfo.isColorOutputSupported()) {
462                     Log.i(TAG, "Camera " + mCameraIds[i] +
463                             " does not support color outputs, skipping");
464                     continue;
465                 }
466 
467                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
468                 CaptureRequest.Builder requestBuilder =
469                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
470 
471                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
472 
473                 startPreview(requestBuilder, maxPreviewSz, listener);
474 
475                 // Flash control can only be used when the AE mode is ON or OFF.
476                 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);
477 
478                 // LEGACY won't support AE mode OFF
479                 boolean aeOffModeSupported = false;
480                 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
481                     if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
482                         aeOffModeSupported = true;
483                     }
484                 }
485                 if (aeOffModeSupported) {
486                     flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
487                 }
488 
489                 stopPreview();
490             } finally {
491                 closeDevice();
492             }
493         }
494     }
495 
496     /**
497      * Test face detection modes and results.
498      */
testFaceDetection()499     public void testFaceDetection() throws Exception {
500         for (int i = 0; i < mCameraIds.length; i++) {
501             try {
502                 openDevice(mCameraIds[i]);
503                 if (!mStaticInfo.isColorOutputSupported()) {
504                     Log.i(TAG, "Camera " + mCameraIds[i] +
505                             " does not support color outputs, skipping");
506                     continue;
507                 }
508                 faceDetectionTestByCamera();
509             } finally {
510                 closeDevice();
511             }
512         }
513     }
514 
515     /**
516      * Test tone map modes and controls.
517      */
testToneMapControl()518     public void testToneMapControl() throws Exception {
519         for (String id : mCameraIds) {
520             try {
521                 openDevice(id);
522                 if (!mStaticInfo.isManualToneMapSupported()) {
523                     Log.i(TAG, "Camera " + id +
524                             " doesn't support tone mapping controls, skipping test");
525                     continue;
526                 }
527                 toneMapTestByCamera();
528             } finally {
529                 closeDevice();
530             }
531         }
532     }
533 
534     /**
535      * Test color correction modes and controls.
536      */
testColorCorrectionControl()537     public void testColorCorrectionControl() throws Exception {
538         for (String id : mCameraIds) {
539             try {
540                 openDevice(id);
541                 if (!mStaticInfo.isColorCorrectionSupported()) {
542                     Log.i(TAG, "Camera " + id +
543                             " doesn't support color correction controls, skipping test");
544                     continue;
545                 }
546                 colorCorrectionTestByCamera();
547             } finally {
548                 closeDevice();
549             }
550         }
551     }
552 
553     /**
554      * Test edge mode control for Fps not exceeding 30.
555      */
testEdgeModeControl()556     public void testEdgeModeControl() throws Exception {
557         for (String id : mCameraIds) {
558             try {
559                 openDevice(id);
560                 if (!mStaticInfo.isEdgeModeControlSupported()) {
561                     Log.i(TAG, "Camera " + id +
562                             " doesn't support EDGE_MODE controls, skipping test");
563                     continue;
564                 }
565 
566                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
567                 edgeModesTestByCamera(fpsRanges);
568             } finally {
569                 closeDevice();
570             }
571         }
572     }
573 
574     /**
575      * Test edge mode control for Fps greater than 30.
576      */
testEdgeModeControlFastFps()577     public void testEdgeModeControlFastFps() throws Exception {
578         for (String id : mCameraIds) {
579             try {
580                 openDevice(id);
581                 if (!mStaticInfo.isEdgeModeControlSupported()) {
582                     Log.i(TAG, "Camera " + id +
583                             " doesn't support EDGE_MODE controls, skipping test");
584                     continue;
585                 }
586 
587                 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
588                 edgeModesTestByCamera(fpsRanges);
589             } finally {
590                 closeDevice();
591             }
592         }
593 
594     }
595 
596     /**
597      * Test focus distance control.
598      */
testFocusDistanceControl()599     public void testFocusDistanceControl() throws Exception {
600         for (String id : mCameraIds) {
601             try {
602                 openDevice(id);
603                 if (!mStaticInfo.hasFocuser()) {
604                     Log.i(TAG, "Camera " + id + " has no focuser, skipping test");
605                     continue;
606                 }
607 
608                 if (!mStaticInfo.isCapabilitySupported(
609                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
610                     Log.i(TAG, "Camera " + id +
611                             " does not support MANUAL_SENSOR, skipping test");
612                     continue;
613                 }
614 
615                 focusDistanceTestByCamera();
616             } finally {
617                 closeDevice();
618             }
619         }
620     }
621 
622     /**
623      * Test noise reduction mode for fps ranges not exceeding 30
624      */
testNoiseReductionModeControl()625     public void testNoiseReductionModeControl() throws Exception {
626         for (String id : mCameraIds) {
627             try {
628                 openDevice(id);
629                 if (!mStaticInfo.isNoiseReductionModeControlSupported()) {
630                     Log.i(TAG, "Camera " + id +
631                             " doesn't support noise reduction mode, skipping test");
632                     continue;
633                 }
634 
635                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
636                 noiseReductionModeTestByCamera(fpsRanges);
637             } finally {
638                 closeDevice();
639             }
640         }
641     }
642 
643     /**
644      * Test noise reduction mode for fps ranges greater than 30
645      */
testNoiseReductionModeControlFastFps()646     public void testNoiseReductionModeControlFastFps() throws Exception {
647         for (String id : mCameraIds) {
648             try {
649                 openDevice(id);
650                 if (!mStaticInfo.isNoiseReductionModeControlSupported()) {
651                     Log.i(TAG, "Camera " + id +
652                             " doesn't support noise reduction mode, skipping test");
653                     continue;
654                 }
655 
656                 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
657                 noiseReductionModeTestByCamera(fpsRanges);
658             } finally {
659                 closeDevice();
660             }
661         }
662     }
663 
664     /**
665      * Test AWB lock control.
666      *
667      * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p>
668      */
testAwbModeAndLock()669     public void testAwbModeAndLock() throws Exception {
670         for (String id : mCameraIds) {
671             try {
672                 openDevice(id);
673                 if (!mStaticInfo.isColorOutputSupported()) {
674                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
675                     continue;
676                 }
677                 awbModeAndLockTestByCamera();
678             } finally {
679                 closeDevice();
680             }
681         }
682     }
683 
684     /**
685      * Test different AF modes.
686      */
testAfModes()687     public void testAfModes() throws Exception {
688         for (String id : mCameraIds) {
689             try {
690                 openDevice(id);
691                 if (!mStaticInfo.isColorOutputSupported()) {
692                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
693                     continue;
694                 }
695                 afModeTestByCamera();
696             } finally {
697                 closeDevice();
698             }
699         }
700     }
701 
702     /**
703      * Test video and optical stabilizations.
704      */
testCameraStabilizations()705     public void testCameraStabilizations() throws Exception {
706         for (String id : mCameraIds) {
707             try {
708                 openDevice(id);
709                 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys();
710                 if (!(keys.contains(
711                         CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ||
712                         keys.contains(
713                                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) {
714                     Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes");
715                     continue;
716                 }
717                 if (!mStaticInfo.isColorOutputSupported()) {
718                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
719                     continue;
720                 }
721                 stabilizationTestByCamera();
722             } finally {
723                 closeDevice();
724             }
725         }
726     }
727 
728     /**
729      * Test digitalZoom (center wise and non-center wise), validate the returned crop regions.
730      * The max preview size is used for each camera.
731      */
testDigitalZoom()732     public void testDigitalZoom() throws Exception {
733         for (String id : mCameraIds) {
734             try {
735                 openDevice(id);
736                 if (!mStaticInfo.isColorOutputSupported()) {
737                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
738                     continue;
739                 }
740                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
741                 digitalZoomTestByCamera(maxPreviewSize);
742             } finally {
743                 closeDevice();
744             }
745         }
746     }
747 
748     /**
749      * Test digital zoom and all preview size combinations.
750      * TODO: this and above test should all be moved to preview test class.
751      */
testDigitalZoomPreviewCombinations()752     public void testDigitalZoomPreviewCombinations() throws Exception {
753         for (String id : mCameraIds) {
754             try {
755                 openDevice(id);
756                 if (!mStaticInfo.isColorOutputSupported()) {
757                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
758                     continue;
759                 }
760                 digitalZoomPreviewCombinationTestByCamera();
761             } finally {
762                 closeDevice();
763             }
764         }
765     }
766 
767     /**
768      * Test scene mode controls.
769      */
testSceneModes()770     public void testSceneModes() throws Exception {
771         for (String id : mCameraIds) {
772             try {
773                 openDevice(id);
774                 if (mStaticInfo.isSceneModeSupported()) {
775                     sceneModeTestByCamera();
776                 }
777             } finally {
778                 closeDevice();
779             }
780         }
781     }
782 
783     /**
784      * Test effect mode controls.
785      */
testEffectModes()786     public void testEffectModes() throws Exception {
787         for (String id : mCameraIds) {
788             try {
789                 openDevice(id);
790                 if (!mStaticInfo.isColorOutputSupported()) {
791                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
792                     continue;
793                 }
794                 effectModeTestByCamera();
795             } finally {
796                 closeDevice();
797             }
798         }
799     }
800 
801     // TODO: add 3A state machine test.
802 
803     /**
804      * Per camera dynamic black and white level test.
805      */
dynamicBlackWhiteLevelTestByCamera()806     private void dynamicBlackWhiteLevelTestByCamera() throws Exception {
807         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
808         SimpleImageReaderListener imageListener = null;
809         CaptureRequest.Builder previewBuilder =
810                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
811         CaptureRequest.Builder rawBuilder = null;
812         Size previewSize =
813                 getMaxPreviewSize(mCamera.getId(), mCameraManager,
814                 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
815         Size rawSize = null;
816         boolean canCaptureBlackRaw =
817                 mStaticInfo.isCapabilitySupported(
818                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) &&
819                 mStaticInfo.isOpticalBlackRegionSupported();
820         if (canCaptureBlackRaw) {
821             // Capture Raw16, then calculate the optical black, and use it to check with the dynamic
822             // black level.
823             rawBuilder =
824                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
825             rawSize = mStaticInfo.getRawDimensChecked();
826             imageListener = new SimpleImageReaderListener();
827             prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize,
828                     resultListener, imageListener);
829         } else {
830             startPreview(previewBuilder, previewSize, resultListener);
831         }
832 
833         // Capture a sequence of frames with different sensitivities and validate the black/white
834         // level values
835         int[] sensitivities = getSensitivityTestValues();
836         float[][] dynamicBlackLevels = new float[sensitivities.length][];
837         int[] dynamicWhiteLevels = new int[sensitivities.length];
838         float[][] opticalBlackLevels = new float[sensitivities.length][];
839         for (int i = 0; i < sensitivities.length; i++) {
840             CaptureResult result = null;
841             if (canCaptureBlackRaw) {
842                 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
843                 CaptureRequest rawRequest = rawBuilder.build();
844                 mSession.capture(rawRequest, resultListener, mHandler);
845                 result = resultListener.getCaptureResultForRequest(rawRequest,
846                         NUM_RESULTS_WAIT_TIMEOUT);
847                 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
848 
849                 // Get max (area-wise) optical black region
850                 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get(
851                         CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
852                 Rect maxRegion = opticalBlackRegions[0];
853                 for (Rect region : opticalBlackRegions) {
854                     if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) {
855                         maxRegion = region;
856                     }
857                 }
858 
859                 // Get average black pixel values in the region (region is multiple of 2x2)
860                 Image.Plane rawPlane = rawImage.getPlanes()[0];
861                 ByteBuffer rawBuffer = rawPlane.getBuffer();
862                 float[] avgBlackLevels = {0, 0, 0, 0};
863                 final int rowSize = rawPlane.getRowStride();
864                 final int bytePerPixel = rawPlane.getPixelStride();
865                 if (VERBOSE) {
866                     Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " +
867                             rawPlane.getRowStride());
868                 }
869                 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) {
870                     for (int col = maxRegion.left; col < maxRegion.right; col += 2) {
871                         int startOffset = row * rowSize + col * bytePerPixel;
872                         avgBlackLevels[0] += rawBuffer.getShort(startOffset);
873                         avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel);
874                         startOffset += rowSize;
875                         avgBlackLevels[2] += rawBuffer.getShort(startOffset);
876                         avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel);
877                     }
878                 }
879                 int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2);
880                 for (int m = 0; m < avgBlackLevels.length; m++) {
881                     avgBlackLevels[m] /= numBlackBlocks;
882                 }
883                 opticalBlackLevels[i] = avgBlackLevels;
884 
885                 if (VERBOSE) {
886                     Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s",
887                             sensitivities[i], Arrays.toString(avgBlackLevels)));
888                 }
889 
890                 rawImage.close();
891             } else {
892                 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
893                 CaptureRequest previewRequest = previewBuilder.build();
894                 mSession.capture(previewRequest, resultListener, mHandler);
895                 result = resultListener.getCaptureResultForRequest(previewRequest,
896                         NUM_RESULTS_WAIT_TIMEOUT);
897             }
898 
899             dynamicBlackLevels[i] = getValueNotNull(result,
900                     CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
901             dynamicWhiteLevels[i] = getValueNotNull(result,
902                     CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
903         }
904 
905         if (VERBOSE) {
906             Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities));
907             Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels));
908             Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels));
909             if (canCaptureBlackRaw) {
910                 Log.v(TAG, "Optical black level results " +
911                         Arrays.deepToString(opticalBlackLevels));
912             }
913         }
914 
915         // check the dynamic black level against global black level.
916         // Implicit guarantee: if the dynamic black level is supported, fixed black level must be
917         // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions).
918         BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get(
919                 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
920         int[] fixedBlackLevels = new int[4];
921         int fixedWhiteLevel = mStaticInfo.getCharacteristics().get(
922                 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
923         blackPattern.copyTo(fixedBlackLevels, 0);
924         float maxBlackDeviation = 0;
925         int maxWhiteDeviation = 0;
926         for (int i = 0; i < dynamicBlackLevels.length; i++) {
927             for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
928                 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) {
929                     maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]);
930                 }
931             }
932             if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) {
933                 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel);
934             }
935         }
936         mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level"
937                 + " exceed threshold."
938                 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels),
939                 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation);
940         mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceed threshold."
941                 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels),
942                 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN,
943                 (float)maxWhiteDeviation);
944 
945         // Validate against optical black levels if it is available
946         if (canCaptureBlackRaw) {
947             maxBlackDeviation = 0;
948             for (int i = 0; i < dynamicBlackLevels.length; i++) {
949                 for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
950                     if (maxBlackDeviation <
951                             Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) {
952                         maxBlackDeviation =
953                                 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]);
954                     }
955                 }
956             }
957 
958             mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black"
959                     + " exceed threshold."
960                     + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)
961                     + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels),
962                     fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN,
963                     maxBlackDeviation);
964         }
965     }
966 
noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges)967     private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
968         Size maxPrevSize = mOrderedPreviewSizes.get(0);
969         CaptureRequest.Builder requestBuilder =
970                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
971         int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked();
972 
973         for (int mode : availableModes) {
974             requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode);
975 
976             // Test that OFF and FAST mode should not slow down the frame rate.
977             if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF ||
978                     mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) {
979                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
980             }
981 
982             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
983             startPreview(requestBuilder, maxPrevSize, resultListener);
984             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
985             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
986 
987             verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode,
988                     resultListener, NUM_FRAMES_VERIFIED);
989         }
990 
991         stopPreview();
992     }
993 
focusDistanceTestByCamera()994     private void focusDistanceTestByCamera() throws Exception {
995         CaptureRequest.Builder requestBuilder =
996                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
997         requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
998         int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked();
999         float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED;
1000         if (calibrationStatus ==
1001                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) {
1002             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED;
1003         } else if (calibrationStatus ==
1004                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) {
1005             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE;
1006         }
1007 
1008         // Test changing focus distance with repeating request
1009         focusDistanceTestRepeating(requestBuilder, errorMargin);
1010 
1011         if (calibrationStatus ==
1012                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED)  {
1013             // Test changing focus distance with burst request
1014             focusDistanceTestBurst(requestBuilder, errorMargin);
1015         }
1016     }
1017 
focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, float errorMargin)1018     private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder,
1019             float errorMargin) throws Exception {
1020         CaptureRequest request;
1021         float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0);
1022         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1023         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1024         startPreview(requestBuilder, maxPrevSize, resultListener);
1025 
1026         float[] resultDistances = new float[testDistances.length];
1027         int[] resultLensStates = new int[testDistances.length];
1028 
1029         // Collect results
1030         for (int i = 0; i < testDistances.length; i++) {
1031             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
1032             request = requestBuilder.build();
1033             resultListener = new SimpleCaptureCallback();
1034             mSession.setRepeatingRequest(request, resultListener, mHandler);
1035             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1036             waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1037                     CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1038             CaptureResult result = resultListener.getCaptureResultForRequest(request,
1039                     NUM_RESULTS_WAIT_TIMEOUT);
1040 
1041             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1042             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
1043 
1044             if (VERBOSE) {
1045                 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i]
1046                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
1047             }
1048         }
1049 
1050         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
1051                 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0,
1052                 errorMargin);
1053 
1054         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1055 
1056             // Test hyperfocal distance optionally
1057             float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1058             if (hyperFocalDistance > 0) {
1059                 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance);
1060                 request = requestBuilder.build();
1061                 resultListener = new SimpleCaptureCallback();
1062                 mSession.setRepeatingRequest(request, resultListener, mHandler);
1063                 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1064 
1065                 // Then wait for the lens.state to be stationary.
1066                 waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1067                         CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1068                 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1069                 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1070                 mCollector.expectInRange("Focus distance for hyper focal should be close enough to" +
1071                         " requested value", focusDistance,
1072                         hyperFocalDistance * (1.0f - errorMargin),
1073                         hyperFocalDistance * (1.0f + errorMargin));
1074             }
1075         }
1076     }
1077 
focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, float errorMargin)1078     private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder,
1079             float errorMargin) throws Exception {
1080 
1081         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1082         float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT,
1083                 NUM_FOCUS_DISTANCES_REPEAT);
1084         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1085         startPreview(requestBuilder, maxPrevSize, resultListener);
1086 
1087         float[] resultDistances = new float[testDistances.length];
1088         int[] resultLensStates = new int[testDistances.length];
1089 
1090         final int maxPipelineDepth = mStaticInfo.getCharacteristics().get(
1091             CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
1092 
1093         // Move lens to starting position, and wait for the lens.state to be stationary.
1094         CaptureRequest request;
1095         requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]);
1096         request = requestBuilder.build();
1097         mSession.setRepeatingRequest(request, resultListener, mHandler);
1098         waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1099                 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1100 
1101         // Submit burst of requests with different focus distances
1102         List<CaptureRequest> burst = new ArrayList<>();
1103         for (int i = 0; i < testDistances.length; i ++) {
1104             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
1105             burst.add(requestBuilder.build());
1106         }
1107         mSession.captureBurst(burst, resultListener, mHandler);
1108 
1109         for (int i = 0; i < testDistances.length; i++) {
1110             CaptureResult result = resultListener.getCaptureResultForRequest(
1111                     burst.get(i), maxPipelineDepth+1);
1112 
1113             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1114             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
1115 
1116             if (VERBOSE) {
1117                 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i]
1118                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
1119             }
1120         }
1121 
1122         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
1123                 /*ascendingOrder*/true, /*noOvershoot*/true,
1124                 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT,
1125                 errorMargin);
1126 
1127     }
1128 
1129     /**
1130      * Verify focus distance control.
1131      *
1132      * Assumption:
1133      * - First repeatStart+1 elements of requestedDistances share the same value
1134      * - Last repeatEnd+1 elements of requestedDistances share the same value
1135      * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder.
1136      * - Focuser is at requestedDistances[0] at the beginning of the test.
1137      *
1138      * @param requestedDistances The requested focus distances
1139      * @param resultDistances The result focus distances
1140      * @param lensStates The result lens states
1141      * @param ascendingOrder The order of the expected focus distance request/output
1142      * @param noOvershoot Assert that focus control doesn't overshoot the requested value
1143      * @param repeatStart The number of times the starting focus distance is repeated
1144      * @param repeatEnd The number of times the ending focus distance is repeated
1145      * @param errorMargin The error margin between request and result
1146      */
verifyFocusDistance(float[] requestedDistances, float[] resultDistances, int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, int repeatEnd, float errorMargin)1147     private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances,
1148             int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart,
1149             int repeatEnd, float errorMargin) {
1150 
1151         float minValue = 0;
1152         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
1153         float hyperfocalDistance = 0;
1154         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1155             hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1156         }
1157 
1158         // Verify lens and focus distance do not change for first repeatStart
1159         // results.
1160         for (int i = 0; i < repeatStart; i ++) {
1161             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1162             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1163             float marginMax =
1164                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1165 
1166             mCollector.expectEquals("Lens moves even though focus_distance didn't change",
1167                     lensStates[i], CaptureResult.LENS_STATE_STATIONARY);
1168             if (noOvershoot) {
1169                 mCollector.expectInRange("Focus distance in result should be close enough to " +
1170                         "requested value", resultDistances[i], marginMin, marginMax);
1171             }
1172             mCollector.expectInRange("Result focus distance is out of range",
1173                     resultDistances[i], minValue, maxValue);
1174         }
1175 
1176         for (int i = repeatStart; i < resultDistances.length-1; i ++) {
1177             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1178             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1179             float marginMax =
1180                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1181             if (noOvershoot) {
1182                 // Result focus distance shouldn't overshoot the request
1183                 boolean condition;
1184                 if (ascendingOrder) {
1185                     condition = resultDistances[i] <= marginMax;
1186                } else {
1187                     condition = resultDistances[i] >= marginMin;
1188                 }
1189                 mCollector.expectTrue(String.format(
1190                       "Lens shouldn't move past request focus distance. result " +
1191                       resultDistances[i] + " vs target of " +
1192                       (ascendingOrder ? marginMax : marginMin)), condition);
1193             }
1194 
1195             // Verify monotonically increased focus distance setting
1196             boolean condition;
1197             float compareDistance = resultDistances[i+1] - resultDistances[i];
1198             if (i < resultDistances.length-1-repeatEnd) {
1199                 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0);
1200             } else {
1201                 condition = (ascendingOrder ? compareDistance >= 0 : compareDistance <= 0);
1202             }
1203             mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
1204                   + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
1205                   + lensStates[i+1] + "] monotonicity is broken"), condition);
1206         }
1207 
1208         mCollector.expectTrue(String.format("All values of this array are equal: " +
1209                 resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
1210                 resultDistances[0] != resultDistances[resultDistances.length-1]);
1211 
1212         // Verify lens moved to destination location.
1213         mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
1214                 " for minFocusDistance should be closed enough to requested value " +
1215                 requestedDistances[requestedDistances.length-1],
1216                 resultDistances[resultDistances.length-1],
1217                 requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
1218                 requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
1219     }
1220 
1221     /**
1222      * Verify edge mode control results for fpsRanges
1223      */
edgeModesTestByCamera(List<Range<Integer>> fpsRanges)1224     private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
1225         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1226         int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
1227         CaptureRequest.Builder requestBuilder =
1228                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1229 
1230         for (int mode : edgeModes) {
1231             requestBuilder.set(CaptureRequest.EDGE_MODE, mode);
1232 
1233             // Test that OFF and FAST mode should not slow down the frame rate.
1234             if (mode == CaptureRequest.EDGE_MODE_OFF ||
1235                     mode == CaptureRequest.EDGE_MODE_FAST) {
1236                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
1237             }
1238 
1239             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1240             startPreview(requestBuilder, maxPrevSize, resultListener);
1241             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1242             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1243 
1244             verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
1245                     NUM_FRAMES_VERIFIED);
1246        }
1247 
1248         stopPreview();
1249     }
1250 
1251     /**
1252      * Test color correction controls.
1253      *
1254      * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
1255      * the unit gain and identity transform.</p>
1256      */
colorCorrectionTestByCamera()1257     private void colorCorrectionTestByCamera() throws Exception {
1258         CaptureRequest request;
1259         CaptureResult result;
1260         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1261         updatePreviewSurface(maxPreviewSz);
1262         CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
1263         CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
1264         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1265 
1266         startPreview(previewRequestBuilder, maxPreviewSz, listener);
1267 
1268         // Default preview result should give valid color correction metadata.
1269         result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1270         validateColorCorrectionResult(result,
1271                 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
1272         int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
1273         // TRANSFORM_MATRIX mode
1274         // Only test unit gain and identity transform
1275         List<Integer> availableControlModes = Arrays.asList(
1276                 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked()));
1277         List<Integer> availableAwbModes = Arrays.asList(
1278                 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked()));
1279         boolean isManualCCSupported =
1280                 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) ||
1281                 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF);
1282         if (isManualCCSupported) {
1283             if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) {
1284                 // Only manual AWB mode is supported
1285                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1286                         CaptureRequest.CONTROL_MODE_AUTO);
1287                 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1288                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1289             } else {
1290                 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode.
1291                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1292                         CaptureRequest.CONTROL_MODE_OFF);
1293             }
1294 
1295             RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
1296 
1297             ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
1298                 new Rational[] {
1299                     ONE_R, ZERO_R, ZERO_R,
1300                     ZERO_R, ONE_R, ZERO_R,
1301                     ZERO_R, ZERO_R, ONE_R
1302                 });
1303 
1304             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1305             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
1306             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
1307             request = manualRequestBuilder.build();
1308             mSession.capture(request, listener, mHandler);
1309             result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1310             RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
1311             ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
1312             validateColorCorrectionResult(result, colorCorrectionMode);
1313             mCollector.expectEquals("control mode result/request mismatch",
1314                     CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
1315             mCollector.expectEquals("Color correction gain result/request mismatch",
1316                     UNIT_GAIN, gains);
1317             mCollector.expectEquals("Color correction gain result/request mismatch",
1318                     IDENTITY_TRANSFORM, transform);
1319 
1320         }
1321 
1322         // FAST mode
1323         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
1324         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1325         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1326         request = manualRequestBuilder.build();
1327         mSession.capture(request, listener, mHandler);
1328         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1329         validateColorCorrectionResult(result, colorCorrectionMode);
1330         mCollector.expectEquals("control mode result/request mismatch",
1331                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1332 
1333         // HIGH_QUALITY mode
1334         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
1335         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1336         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1337         request = manualRequestBuilder.build();
1338         mSession.capture(request, listener, mHandler);
1339         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1340         validateColorCorrectionResult(result, colorCorrectionMode);
1341         mCollector.expectEquals("control mode result/request mismatch",
1342                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1343     }
1344 
validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode)1345     private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
1346         final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
1347         final int TRANSFORM_SIZE = 9;
1348         Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
1349         Arrays.fill(zeroTransform, ZERO_R);
1350         final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);
1351 
1352         RggbChannelVector resultGain;
1353         if ((resultGain = mCollector.expectKeyValueNotNull(result,
1354                 CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
1355             mCollector.expectKeyValueNotEquals(result,
1356                     CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS);
1357         }
1358 
1359         ColorSpaceTransform resultTransform;
1360         if ((resultTransform = mCollector.expectKeyValueNotNull(result,
1361                 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) {
1362             mCollector.expectKeyValueNotEquals(result,
1363                     CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM);
1364         }
1365 
1366         mCollector.expectEquals("color correction mode result/request mismatch",
1367                 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE));
1368     }
1369 
1370     /**
1371      * Test flash mode control by AE mode.
1372      * <p>
1373      * Only allow AE mode ON or OFF, because other AE mode could run into conflict with
1374      * flash manual control. This function expects the camera to already have an active
1375      * repeating request and be sending results to the listener.
1376      * </p>
1377      *
1378      * @param listener The Capture listener that is used to wait for capture result
1379      * @param aeMode The AE mode for flash to test with
1380      */
flashTestByAeMode(SimpleCaptureCallback listener, int aeMode)1381     private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception {
1382         CaptureResult result;
1383         final int NUM_FLASH_REQUESTS_TESTED = 10;
1384         CaptureRequest.Builder requestBuilder = createRequestForPreview();
1385 
1386         if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) {
1387             requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
1388         } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
1389             changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
1390         } else {
1391             throw new IllegalArgumentException("This test only works when AE mode is ON or OFF");
1392         }
1393 
1394         mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1395         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1396 
1397         // For camera that doesn't have flash unit, flash state should always be UNAVAILABLE.
1398         if (mStaticInfo.getFlashInfoChecked() == false) {
1399             for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1400                 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1401                 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE"
1402                         + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE,
1403                         result.get(CaptureResult.FLASH_STATE));
1404             }
1405 
1406             return;
1407         }
1408 
1409         // Test flash SINGLE mode control. Wait for flash state to be READY first.
1410         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
1411             waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY,
1412                     NUM_RESULTS_WAIT_TIMEOUT);
1413         } // else the settings were already waited on earlier
1414 
1415         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
1416         CaptureRequest flashSinglerequest = requestBuilder.build();
1417 
1418         int flashModeSingleRequests = captureRequestsSynchronized(
1419                 flashSinglerequest, listener, mHandler);
1420         waitForNumResults(listener, flashModeSingleRequests - 1);
1421         result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT);
1422         // Result mode must be SINGLE, state must be FIRED.
1423         mCollector.expectEquals("Flash mode result must be SINGLE",
1424                 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE));
1425         mCollector.expectEquals("Flash state result must be FIRED",
1426                 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1427 
1428         // Test flash TORCH mode control.
1429         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1430         CaptureRequest torchRequest = requestBuilder.build();
1431 
1432         int flashModeTorchRequests = captureRequestsSynchronized(torchRequest,
1433                 NUM_FLASH_REQUESTS_TESTED, listener, mHandler);
1434         waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED);
1435 
1436         // Verify the results
1437         TorchSeqState state = TorchSeqState.RAMPING_UP;
1438         for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1439             result = listener.getCaptureResultForRequest(torchRequest,
1440                     NUM_RESULTS_WAIT_TIMEOUT);
1441             int flashMode = result.get(CaptureResult.FLASH_MODE);
1442             int flashState = result.get(CaptureResult.FLASH_STATE);
1443             // Result mode must be TORCH
1444             mCollector.expectEquals("Flash mode result " + i + " must be TORCH",
1445                     CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE));
1446             if (state == TorchSeqState.RAMPING_UP &&
1447                     flashState == CaptureResult.FLASH_STATE_FIRED) {
1448                 state = TorchSeqState.FIRED;
1449             } else if (state == TorchSeqState.FIRED &&
1450                     flashState == CaptureResult.FLASH_STATE_PARTIAL) {
1451                 state = TorchSeqState.RAMPING_DOWN;
1452             }
1453 
1454             if (i == 0 && mStaticInfo.isPerFrameControlSupported()) {
1455                 mCollector.expectTrue(
1456                         "Per frame control device must enter FIRED state on first torch request",
1457                         state == TorchSeqState.FIRED);
1458             }
1459 
1460             if (state == TorchSeqState.FIRED) {
1461                 mCollector.expectEquals("Flash state result " + i + " must be FIRED",
1462                         CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1463             } else {
1464                 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL",
1465                         CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE));
1466             }
1467         }
1468         mCollector.expectTrue("Torch state FIRED never seen",
1469                 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN);
1470 
1471         // Test flash OFF mode control
1472         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1473         CaptureRequest flashOffrequest = requestBuilder.build();
1474 
1475         int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler);
1476         waitForNumResults(listener, flashModeOffRequests - 1);
1477         result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT);
1478         mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF,
1479                 result.get(CaptureResult.FLASH_MODE));
1480     }
1481 
verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, int mode, boolean isAeManual, long requestExpTime)1482     private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified,
1483             int mode, boolean isAeManual, long requestExpTime) throws Exception {
1484         // Skip the first a couple of frames as antibanding may not be fully up yet.
1485         final int NUM_FRAMES_SKIPPED = 5;
1486         for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) {
1487             listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1488         }
1489 
1490         for (int i = 0; i < numFramesVerified; i++) {
1491             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1492             Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
1493             assertNotNull("Exposure time shouldn't be null", resultExpTime);
1494             Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
1495             // Scene flicker result should be always available.
1496             assertNotNull("Scene flicker must not be null", flicker);
1497             assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE &&
1498                     flicker <= STATISTICS_SCENE_FLICKER_60HZ);
1499 
1500             if (isAeManual) {
1501                 // First, round down not up, second, need close enough.
1502                 validateExposureTime(requestExpTime, resultExpTime);
1503                 return;
1504             }
1505 
1506             long expectedExpTime = resultExpTime; // Default, no exposure adjustment.
1507             if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) {
1508                 // result exposure time must be adjusted by 50Hz illuminant source.
1509                 expectedExpTime =
1510                         getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1511             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) {
1512                 // result exposure time must be adjusted by 60Hz illuminant source.
1513                 expectedExpTime =
1514                         getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1515             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){
1516                 /**
1517                  * Use STATISTICS_SCENE_FLICKER to tell the illuminant source
1518                  * and do the exposure adjustment.
1519                  */
1520                 expectedExpTime = resultExpTime;
1521                 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) {
1522                     expectedExpTime =
1523                             getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1524                 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) {
1525                     expectedExpTime =
1526                             getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1527                 }
1528             }
1529 
1530             if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) {
1531                 mCollector.addMessage(String.format("Result exposure time %dns diverges too much"
1532                         + " from expected exposure time %dns for mode %d when AE is auto",
1533                         resultExpTime, expectedExpTime, mode));
1534             }
1535         }
1536     }
1537 
antiBandingTestByMode(Size size, int mode)1538     private void antiBandingTestByMode(Size size, int mode)
1539             throws Exception {
1540         if(VERBOSE) {
1541             Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId());
1542         }
1543         CaptureRequest.Builder requestBuilder =
1544                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1545 
1546         requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode);
1547 
1548         // Test auto AE mode anti-banding behavior
1549         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1550         startPreview(requestBuilder, size, resultListener);
1551         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1552         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false,
1553                 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK);
1554 
1555         // Test manual AE mode anti-banding behavior
1556         // 65ms, must be supported by full capability devices.
1557         final long TEST_MANUAL_EXP_TIME_NS = 65000000L;
1558         long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS);
1559         changeExposure(requestBuilder, manualExpTime);
1560         resultListener = new SimpleCaptureCallback();
1561         startPreview(requestBuilder, size, resultListener);
1562         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1563         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true,
1564                 manualExpTime);
1565 
1566         stopPreview();
1567     }
1568 
1569     /**
1570      * Test the all available AE modes and AE lock.
1571      * <p>
1572      * For manual AE mode, test iterates through different sensitivities and
1573      * exposure times, validate the result exposure time correctness. For
1574      * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested.
1575      * For the rest of the AUTO mode, AE lock is tested.
1576      * </p>
1577      *
1578      * @param mode
1579      */
aeModeAndLockTestByMode(int mode)1580     private void aeModeAndLockTestByMode(int mode)
1581             throws Exception {
1582         switch (mode) {
1583             case CONTROL_AE_MODE_OFF:
1584                 if (mStaticInfo.isCapabilitySupported(
1585                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
1586                     // Test manual exposure control.
1587                     aeManualControlTest();
1588                 } else {
1589                     Log.w(TAG,
1590                             "aeModeAndLockTestByMode - can't test AE mode OFF without " +
1591                             "manual sensor control");
1592                 }
1593                 break;
1594             case CONTROL_AE_MODE_ON:
1595             case CONTROL_AE_MODE_ON_AUTO_FLASH:
1596             case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
1597             case CONTROL_AE_MODE_ON_ALWAYS_FLASH:
1598             case CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
1599                 // Test AE lock for above AUTO modes.
1600                 aeAutoModeTestLock(mode);
1601                 break;
1602             default:
1603                 throw new UnsupportedOperationException("Unhandled AE mode " + mode);
1604         }
1605     }
1606 
1607     /**
1608      * Test AE auto modes.
1609      * <p>
1610      * Use single request rather than repeating request to test AE lock per frame control.
1611      * </p>
1612      */
aeAutoModeTestLock(int mode)1613     private void aeAutoModeTestLock(int mode) throws Exception {
1614         CaptureRequest.Builder requestBuilder =
1615                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1616         if (mStaticInfo.isAeLockSupported()) {
1617             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1618         }
1619         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode);
1620         configurePreviewOutput(requestBuilder);
1621 
1622         final int MAX_NUM_CAPTURES_DURING_LOCK = 5;
1623         for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) {
1624             autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i);
1625         }
1626     }
1627 
1628     /**
1629      * Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
1630      * the first capture result after the AE lock. The right AE lock behavior is:
1631      * When it is locked, it locks to the current exposure value, and all subsequent
1632      * request with lock ON will have the same exposure value locked.
1633      */
autoAeMultipleCapturesThenTestLock( CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)1634     private void autoAeMultipleCapturesThenTestLock(
1635             CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)
1636             throws Exception {
1637         if (numCapturesDuringLock < 1) {
1638             throw new IllegalArgumentException("numCapturesBeforeLock must be no less than 1");
1639         }
1640         if (VERBOSE) {
1641             Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
1642                     + aeMode + " with " + numCapturesDuringLock + " captures before lock");
1643         }
1644 
1645         final int NUM_CAPTURES_BEFORE_LOCK = 2;
1646         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1647 
1648         CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
1649         boolean canSetAeLock = mStaticInfo.isAeLockSupported();
1650 
1651         // Reset the AE lock to OFF, since we are reusing this builder many times
1652         if (canSetAeLock) {
1653             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1654         }
1655 
1656         // Just send several captures with auto AE, lock off.
1657         CaptureRequest request = requestBuilder.build();
1658         for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
1659             mSession.capture(request, listener, mHandler);
1660         }
1661         waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
1662 
1663         if (!canSetAeLock) {
1664             // Without AE lock, the remaining tests items won't work
1665             return;
1666         }
1667 
1668         // Then fire several capture to lock the AE.
1669         requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1670 
1671         int requestCount = captureRequestsSynchronized(
1672                 requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
1673 
1674         int[] sensitivities = new int[numCapturesDuringLock];
1675         long[] expTimes = new long[numCapturesDuringLock];
1676         Arrays.fill(sensitivities, -1);
1677         Arrays.fill(expTimes, -1L);
1678 
1679         // Get the AE lock on result and validate the exposure values.
1680         waitForNumResults(listener, requestCount - numCapturesDuringLock);
1681         for (int i = 0; i < resultsDuringLock.length; i++) {
1682             resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1683         }
1684 
1685         for (int i = 0; i < numCapturesDuringLock; i++) {
1686             mCollector.expectKeyValueEquals(
1687                     resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
1688         }
1689 
1690         // Can't read manual sensor/exposure settings without manual sensor
1691         if (mStaticInfo.isCapabilitySupported(
1692                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
1693             int sensitivityLocked =
1694                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
1695             long expTimeLocked =
1696                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
1697             for (int i = 1; i < resultsDuringLock.length; i++) {
1698                 mCollector.expectKeyValueEquals(
1699                         resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
1700                 mCollector.expectKeyValueEquals(
1701                         resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
1702             }
1703         }
1704     }
1705 
1706     /**
1707      * Iterate through exposure times and sensitivities for manual AE control.
1708      * <p>
1709      * Use single request rather than repeating request to test manual exposure
1710      * value change per frame control.
1711      * </p>
1712      */
aeManualControlTest()1713     private void aeManualControlTest()
1714             throws Exception {
1715         CaptureRequest.Builder requestBuilder =
1716                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1717         configurePreviewOutput(requestBuilder);
1718 
1719         // Warm up pipeline for more accurate timing
1720         SimpleCaptureCallback warmupListener =  new SimpleCaptureCallback();
1721         mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler);
1722         warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1723 
1724         // Do manual captures
1725         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
1726         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1727 
1728         long[] expTimesNs = getExposureTimeTestValues();
1729         int[] sensitivities = getSensitivityTestValues();
1730         // Submit single request at a time, then verify the result.
1731         for (int i = 0; i < expTimesNs.length; i++) {
1732             for (int j = 0; j < sensitivities.length; j++) {
1733                 if (VERBOSE) {
1734                     Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity "
1735                             + sensitivities[j] + ", exposure time " + expTimesNs[i] + "ns");
1736                 }
1737 
1738                 changeExposure(requestBuilder, expTimesNs[i], sensitivities[j]);
1739                 mSession.capture(requestBuilder.build(), listener, mHandler);
1740 
1741                 // make sure timeout is long enough for long exposure time - add a 2x safety margin
1742                 // to exposure time
1743                 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * expTimesNs[i] / 1000000;
1744                 CaptureResult result = listener.getCaptureResult(timeoutMs);
1745                 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
1746                 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
1747                 validateExposureTime(expTimesNs[i], resultExpTimeNs);
1748                 validateSensitivity(sensitivities[j], resultSensitivity);
1749                 validateFrameDurationForCapture(result);
1750             }
1751         }
1752         mSession.stopRepeating();
1753 
1754         // TODO: Add another case to test where we can submit all requests, then wait for
1755         // results, which will hide the pipeline latency. this is not only faster, but also
1756         // test high speed per frame control and synchronization.
1757     }
1758 
1759 
1760     /**
1761      * Verify black level lock control.
1762      */
verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, int maxLockOffCnt)1763     private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified,
1764             int maxLockOffCnt) throws Exception {
1765         int noLockCnt = 0;
1766         for (int i = 0; i < numFramesVerified; i++) {
1767             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1768             Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK);
1769             assertNotNull("Black level lock result shouldn't be null", blackLevelLock);
1770 
1771             // Count the lock == false result, which could possibly occur at most once.
1772             if (blackLevelLock == false) {
1773                 noLockCnt++;
1774             }
1775 
1776             if(VERBOSE) {
1777                 Log.v(TAG, "Black level lock result: " + blackLevelLock);
1778             }
1779         }
1780         assertTrue("Black level lock OFF occurs " + noLockCnt + " times,  expect at most "
1781                 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt);
1782     }
1783 
1784     /**
1785      * Verify shading map for different shading modes.
1786      */
verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified, int shadingMode)1787     private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
1788             int shadingMode) throws Exception {
1789 
1790         for (int i = 0; i < numFramesVerified; i++) {
1791             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1792             mCollector.expectEquals("Shading mode result doesn't match request",
1793                     shadingMode, result.get(CaptureResult.SHADING_MODE));
1794             LensShadingMap mapObj = result.get(
1795                     CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
1796             assertNotNull("Map object must not be null", mapObj);
1797             int numElementsInMap = mapObj.getGainFactorCount();
1798             float[] map = new float[numElementsInMap];
1799             mapObj.copyGainFactors(map, /*offset*/0);
1800             assertNotNull("Map must not be null", map);
1801             assertFalse(String.format(
1802                     "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
1803                     numElementsInMap >= MAX_SHADING_MAP_SIZE);
1804             assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
1805                     MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);
1806 
1807             if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
1808                     shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
1809                 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
1810                 // elements >= 1.0f
1811 
1812                 int badValueCnt = 0;
1813                 // Detect the bad values of the map data.
1814                 for (int j = 0; j < numElementsInMap; j++) {
1815                     if (Float.isNaN(map[j]) || map[j] < 1.0f) {
1816                         badValueCnt++;
1817                     }
1818                 }
1819                 assertEquals("Number of value in the map is " + badValueCnt + " out of "
1820                         + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
1821             } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
1822                 float[] unityMap = new float[numElementsInMap];
1823                 Arrays.fill(unityMap, 1.0f);
1824                 // shading mode is OFF, expect to receive a unity map.
1825                 assertTrue("Result map " + Arrays.toString(map) + " must be an unity map",
1826                         Arrays.equals(unityMap, map));
1827             }
1828         }
1829     }
1830 
1831     /**
1832      * Test face detection for a camera.
1833      */
1834     private void faceDetectionTestByCamera() throws Exception {
1835         int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
1836 
1837         SimpleCaptureCallback listener;
1838         CaptureRequest.Builder requestBuilder =
1839                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1840 
1841         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1842         for (int mode : faceDetectModes) {
1843             requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
1844             if (VERBOSE) {
1845                 Log.v(TAG, "Start testing face detection mode " + mode);
1846             }
1847 
1848             // Create a new listener for each run to avoid the results from one run spill
1849             // into another run.
1850             listener = new SimpleCaptureCallback();
1851             startPreview(requestBuilder, maxPreviewSz, listener);
1852             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1853             verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
1854         }
1855 
1856         stopPreview();
1857     }
1858 
1859     /**
1860      * Verify face detection results for different face detection modes.
1861      *
1862      * @param listener The listener to get capture result
1863      * @param numFramesVerified Number of results to be verified
1864      * @param faceDetectionMode Face detection mode to be verified against
1865      */
1866     private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
1867             int faceDetectionMode) {
1868         for (int i = 0; i < numFramesVerified; i++) {
1869             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1870             mCollector.expectEquals("Result face detection mode should match the request",
1871                     faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
1872 
1873             Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
1874             List<Integer> faceIds = new ArrayList<Integer>(faces.length);
1875             List<Integer> faceScores = new ArrayList<Integer>(faces.length);
1876             if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
1877                 mCollector.expectEquals("Number of detection faces should always 0 for OFF mode",
1878                         0, faces.length);
1879             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
1880                 for (Face face : faces) {
1881                     mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
1882                     faceScores.add(face.getScore());
1883                     mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
1884                             face.getId() == Face.ID_UNSUPPORTED);
1885                 }
1886             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
1887                 if (VERBOSE) {
1888                     Log.v(TAG, "Number of faces detected: " + faces.length);
1889                 }
1890 
1891                 for (Face face : faces) {
1892                     Rect faceBound;
1893                     boolean faceRectAvailable =  mCollector.expectTrue("Face rectangle "
1894                             + "shouldn't be null", face.getBounds() != null);
1895                     if (!faceRectAvailable) {
1896                         continue;
1897                     }
1898                     faceBound = face.getBounds();
1899 
1900                     faceScores.add(face.getScore());
1901                     faceIds.add(face.getId());
1902 
1903                     mCollector.expectTrue("Face id is shouldn't be -1 for FULL mode",
1904                             face.getId() != Face.ID_UNSUPPORTED);
1905                     boolean leftEyeAvailable =
1906                             mCollector.expectTrue("Left eye position shouldn't be null",
1907                                     face.getLeftEyePosition() != null);
1908                     boolean rightEyeAvailable =
1909                             mCollector.expectTrue("Right eye position shouldn't be null",
1910                                     face.getRightEyePosition() != null);
1911                     boolean mouthAvailable =
1912                             mCollector.expectTrue("Mouth position shouldn't be null",
1913                             face.getMouthPosition() != null);
1914                     // Eyes/mouth position should be inside of the face rect.
1915                     if (leftEyeAvailable) {
1916                         Point leftEye = face.getLeftEyePosition();
1917                         mCollector.expectTrue("Left eye " + leftEye + "should be"
1918                                 + "inside of face rect " + faceBound,
1919                                 faceBound.contains(leftEye.x, leftEye.y));
1920                     }
1921                     if (rightEyeAvailable) {
1922                         Point rightEye = face.getRightEyePosition();
1923                         mCollector.expectTrue("Right eye " + rightEye + "should be"
1924                                 + "inside of face rect " + faceBound,
1925                                 faceBound.contains(rightEye.x, rightEye.y));
1926                     }
1927                     if (mouthAvailable) {
1928                         Point mouth = face.getMouthPosition();
1929                         mCollector.expectTrue("Mouth " + mouth +  " should be inside of"
1930                                 + " face rect " + faceBound,
1931                                 faceBound.contains(mouth.x, mouth.y));
1932                     }
1933                 }
1934             }
1935             mCollector.expectValuesInRange("Face scores are invalid", faceScores,
1936                     Face.SCORE_MIN, Face.SCORE_MAX);
1937             mCollector.expectValuesUnique("Face ids are invalid", faceIds);
1938         }
1939     }
1940 
1941     /**
1942      * Test tone map mode and result by camera
1943      */
1944     private void toneMapTestByCamera() throws Exception {
1945         if (!mStaticInfo.isManualToneMapSupported()) {
1946             return;
1947         }
1948 
1949         CaptureRequest.Builder requestBuilder =
1950                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1951         int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
1952         for (int mode : toneMapModes) {
1953             if (VERBOSE) {
1954                 Log.v(TAG, "Testing tonemap mode " + mode);
1955             }
1956 
1957             requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
1958             switch (mode) {
1959                 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
1960                     TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
1961                             TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
1962                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
1963                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1964 
1965                     toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
1966                             TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
1967                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
1968                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1969                     break;
1970                 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
1971                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
1972                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1973                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
1974                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1975                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
1976                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1977                     break;
1978                 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
1979                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
1980                             CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
1981                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1982                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
1983                             CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
1984                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1985                     break;
1986                 default:
1987                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1988                     break;
1989             }
1990         }
1991 
1992 
1993     }
1994 
1995     /**
1996      * Test tonemap mode with speficied request settings
1997      *
1998      * @param numFramesVerified Number of results to be verified
1999      * @param requestBuilder the request builder of settings to be tested
2000      */
2001     private void testToneMapMode (int numFramesVerified,
2002             CaptureRequest.Builder requestBuilder)  throws Exception  {
2003         final int MIN_TONEMAP_CURVE_POINTS = 2;
2004         final Float ZERO = new Float(0);
2005         final Float ONE = new Float(1.0f);
2006 
2007         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2008         int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
2009         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2010         startPreview(requestBuilder, maxPreviewSz, listener);
2011         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2012 
2013         int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
2014         for (int i = 0; i < numFramesVerified; i++) {
2015             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2016             mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
2017                     result.get(CaptureResult.TONEMAP_MODE));
2018             TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
2019             int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
2020             float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
2021             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
2022             float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
2023             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
2024             float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
2025             tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
2026             tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
2027             tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
2028             if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
2029                 /**
2030                  * TODO: need figure out a good way to measure the difference
2031                  * between request and result, as they may have different array
2032                  * size.
2033                  */
2034             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
2035                 mCollector.expectEquals("Capture result gamma value should match request",
2036                         requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
2037                         result.get(CaptureResult.TONEMAP_GAMMA));
2038             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
2039                 mCollector.expectEquals("Capture result preset curve should match request",
2040                         requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
2041                         result.get(CaptureResult.TONEMAP_PRESET_CURVE));
2042             }
2043 
2044             // Tonemap curve result availability and basic sanity check for all modes.
2045             mCollector.expectValuesInRange("Tonemap curve red values are out of range",
2046                     CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE);
2047             mCollector.expectInRange("Tonemap curve red length is out of range",
2048                     mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2049             mCollector.expectValuesInRange("Tonemap curve green values are out of range",
2050                     CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE);
2051             mCollector.expectInRange("Tonemap curve green length is out of range",
2052                     mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2053             mCollector.expectValuesInRange("Tonemap curve blue values are out of range",
2054                     CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE);
2055             mCollector.expectInRange("Tonemap curve blue length is out of range",
2056                     mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2057         }
2058         stopPreview();
2059     }
2060 
2061     /**
2062      * Test awb mode control.
2063      * <p>
2064      * Test each supported AWB mode, verify the AWB mode in capture result
2065      * matches request. When AWB is locked, the color correction gains and
2066      * transform should remain unchanged.
2067      * </p>
2068      */
2069     private void awbModeAndLockTestByCamera() throws Exception {
2070         int[] awbModes = mStaticInfo.getAwbAvailableModesChecked();
2071         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2072         boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
2073         CaptureRequest.Builder requestBuilder =
2074                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2075         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2076 
2077         for (int mode : awbModes) {
2078             SimpleCaptureCallback listener;
2079             requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode);
2080             listener = new SimpleCaptureCallback();
2081             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2082             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2083 
2084             // Verify AWB mode in capture result.
2085             verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener,
2086                     NUM_FRAMES_VERIFIED);
2087 
2088             if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) {
2089                 // Verify color correction transform and gains stay unchanged after a lock.
2090                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
2091                 listener = new SimpleCaptureCallback();
2092                 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2093                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2094 
2095                 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) {
2096                     waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE,
2097                             CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT);
2098                 }
2099 
2100             }
2101             // Don't verify auto mode result if AWB lock is not supported
2102             if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) {
2103                 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED);
2104             }
2105         }
2106     }
2107 
2108     private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener,
2109             int numFramesVerified) {
2110         // Skip check if cc gains/transform/mode are not available
2111         if (!mStaticInfo.areKeysAvailable(
2112                 CaptureResult.COLOR_CORRECTION_GAINS,
2113                 CaptureResult.COLOR_CORRECTION_TRANSFORM,
2114                 CaptureResult.COLOR_CORRECTION_MODE)) {
2115             return;
2116         }
2117 
2118         CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2119         RggbChannelVector lockedGains =
2120                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
2121         ColorSpaceTransform lockedTransform =
2122                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
2123 
2124         for (int i = 0; i < numFramesVerified; i++) {
2125             result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2126             // Color correction mode check is skipped here, as it is checked in colorCorrectionTest.
2127             validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE));
2128 
2129             RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
2130             ColorSpaceTransform transform =
2131                     getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
2132             mCollector.expectEquals("Color correction gains should remain unchanged after awb lock",
2133                     lockedGains, gains);
2134             mCollector.expectEquals("Color correction transform should remain unchanged after"
2135                     + " awb lock", lockedTransform, transform);
2136         }
2137     }
2138 
2139     /**
2140      * Test AF mode control.
2141      * <p>
2142      * Test all supported AF modes, verify the AF mode in capture result matches
2143      * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode,
2144      * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED
2145      * state within certain amount of frames.
2146      * </p>
2147      */
2148     private void afModeTestByCamera() throws Exception {
2149         int[] afModes = mStaticInfo.getAfAvailableModesChecked();
2150         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2151         CaptureRequest.Builder requestBuilder =
2152                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2153         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2154 
2155         for (int mode : afModes) {
2156             SimpleCaptureCallback listener;
2157             requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode);
2158             listener = new SimpleCaptureCallback();
2159             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2160             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2161 
2162             // Verify AF mode in capture result.
2163             verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener,
2164                     NUM_FRAMES_VERIFIED);
2165 
2166             // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes.
2167             // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily
2168             // result in a passive AF call if the camera has already been focused, and the scene has
2169             // not changed enough to trigger an AF pass.  Skip this constraint for LEGACY.
2170             if (mStaticInfo.isHardwareLevelAtLeastLimited() &&
2171                     (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE ||
2172                     mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
2173                 List<Integer> afStateList = new ArrayList<Integer>();
2174                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED);
2175                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED);
2176                 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList,
2177                         NUM_RESULTS_WAIT_TIMEOUT);
2178             }
2179         }
2180     }
2181 
2182     /**
2183      * Test video and optical stabilizations if they are supported by a given camera.
2184      */
2185     private void stabilizationTestByCamera() throws Exception {
2186         // video stabilization test.
2187         List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys();
2188 
2189         Integer[] videoStabModes = (keys.contains(CameraCharacteristics.
2190                 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ?
2191                 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) :
2192                     new Integer[0];
2193         int[] opticalStabModes = (keys.contains(
2194                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ?
2195                 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0];
2196 
2197         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2198         CaptureRequest.Builder requestBuilder =
2199                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2200         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2201         startPreview(requestBuilder, maxPreviewSize, listener);
2202 
2203         for (Integer mode : videoStabModes) {
2204             listener = new SimpleCaptureCallback();
2205             requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode);
2206             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2207             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2208             // Video stabilization could return any modes.
2209             verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE,
2210                     videoStabModes, listener, NUM_FRAMES_VERIFIED);
2211         }
2212 
2213         for (int mode : opticalStabModes) {
2214             listener = new SimpleCaptureCallback();
2215             requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
2216             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2217             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2218             verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode,
2219                     listener, NUM_FRAMES_VERIFIED);
2220         }
2221 
2222         stopPreview();
2223     }
2224 
2225     private void digitalZoomTestByCamera(Size previewSize) throws Exception {
2226         final int ZOOM_STEPS = 15;
2227         final PointF[] TEST_ZOOM_CENTERS;
2228         final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked();
2229         final float ZOOM_ERROR_MARGIN = 0.01f;
2230         if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) {
2231             // It doesn't make much sense to test the zoom if the device effectively supports
2232             // no zoom.
2233             return;
2234         }
2235 
2236         final int croppingType = mStaticInfo.getScalerCroppingTypeChecked();
2237         if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) {
2238             // Set the four corners in a way that the minimally allowed zoom factor is 2x.
2239             float normalizedLeft = 0.25f;
2240             float normalizedTop = 0.25f;
2241             float normalizedRight = 0.75f;
2242             float normalizedBottom = 0.75f;
2243             // If the max supported zoom is too small, make sure we at least test the max
2244             // Zoom is tested for the four corners.
2245             if (maxZoom < 2.0f) {
2246                 normalizedLeft = 0.5f / maxZoom;
2247                 normalizedTop = 0.5f / maxZoom;
2248                 normalizedRight = 1.0f - normalizedLeft;
2249                 normalizedBottom = 1.0f - normalizedTop;
2250             }
2251             TEST_ZOOM_CENTERS = new PointF[] {
2252                 new PointF(0.5f, 0.5f),   // Center point
2253                 new PointF(normalizedLeft, normalizedTop),     // top left corner zoom
2254                 new PointF(normalizedRight, normalizedTop),    // top right corner zoom
2255                 new PointF(normalizedLeft, normalizedBottom),  // bottom left corner zoom
2256                 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom
2257             };
2258 
2259             if (VERBOSE) {
2260                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM");
2261             }
2262         } else {
2263             // CENTER_ONLY
2264             TEST_ZOOM_CENTERS = new PointF[] {
2265                     new PointF(0.5f, 0.5f),   // Center point
2266             };
2267 
2268             if (VERBOSE) {
2269                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY");
2270             }
2271         }
2272 
2273         final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
2274         Rect[] cropRegions = new Rect[ZOOM_STEPS];
2275         MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][];
2276         CaptureRequest.Builder requestBuilder =
2277                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2278         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2279 
2280         updatePreviewSurface(previewSize);
2281         configurePreviewOutput(requestBuilder);
2282 
2283         CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS];
2284 
2285         // Set algorithm regions to full active region
2286         // TODO: test more different 3A regions
2287         final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] {
2288                 new MeteringRectangle (
2289                         /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(),
2290                         /*meteringWeight*/1)
2291         };
2292 
2293         for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2294             update3aRegion(requestBuilder, algo,  defaultMeteringRect);
2295         }
2296 
2297         final int CAPTURE_SUBMIT_REPEAT;
2298         {
2299             int maxLatency = mStaticInfo.getSyncMaxLatency();
2300             if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
2301                 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1;
2302             } else {
2303                 CAPTURE_SUBMIT_REPEAT = maxLatency + 1;
2304             }
2305         }
2306 
2307         if (VERBOSE) {
2308             Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT);
2309         }
2310 
2311         for (PointF center : TEST_ZOOM_CENTERS) {
2312             Rect previousCrop = null;
2313 
2314             for (int i = 0; i < ZOOM_STEPS; i++) {
2315                 /*
2316                  * Submit capture request
2317                  */
2318                 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS);
2319                 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, maxZoom, activeArraySize);
2320                 if (VERBOSE) {
2321                     Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " +
2322                             center + " The cropRegion is " + cropRegions[i] +
2323                             " Preview size is " + previewSize);
2324                 }
2325                 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]);
2326                 requests[i] = requestBuilder.build();
2327                 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) {
2328                     if (VERBOSE) {
2329                         Log.v(TAG, "submit crop region " + cropRegions[i]);
2330                     }
2331                     mSession.capture(requests[i], listener, mHandler);
2332                 }
2333 
2334                 /*
2335                  * Validate capture result
2336                  */
2337                 waitForNumResults(listener, CAPTURE_SUBMIT_REPEAT - 1); // Drop first few frames
2338                 CaptureResult result = listener.getCaptureResultForRequest(
2339                         requests[i], NUM_RESULTS_WAIT_TIMEOUT);
2340                 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION);
2341 
2342                 /*
2343                  * Validate resulting crop regions
2344                  */
2345                 if (previousCrop != null) {
2346                     Rect currentCrop = cropRegion;
2347                     mCollector.expectTrue(String.format(
2348                             "Crop region should shrink or stay the same " +
2349                                     "(previous = %s, current = %s)",
2350                                     previousCrop, currentCrop),
2351                             previousCrop.equals(currentCrop) ||
2352                                 (previousCrop.width() > currentCrop.width() &&
2353                                  previousCrop.height() > currentCrop.height()));
2354                 }
2355 
2356                 if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
2357                     mCollector.expectRectsAreSimilar(
2358                             "Request and result crop region should be similar",
2359                             cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA);
2360                 }
2361 
2362                 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) {
2363                     mCollector.expectRectCentered(
2364                             "Result crop region should be centered inside the active array",
2365                             new Size(activeArraySize.width(), activeArraySize.height()),
2366                             cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED);
2367                 }
2368 
2369                 /*
2370                  * Validate resulting metering regions
2371                  */
2372 
2373                 // Use the actual reported crop region to calculate the resulting metering region
2374                 expectRegions[i] = getExpectedOutputRegion(
2375                         /*requestRegion*/defaultMeteringRect,
2376                         /*cropRect*/     cropRegion);
2377 
2378                 // Verify Output 3A region is intersection of input 3A region and crop region
2379                 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2380                     validate3aRegion(result, algo, expectRegions[i]);
2381                 }
2382 
2383                 previousCrop = cropRegion;
2384             }
2385 
2386             if (maxZoom > 1.0f) {
2387                 mCollector.expectTrue(
2388                         String.format("Most zoomed-in crop region should be smaller" +
2389                                         "than active array w/h" +
2390                                         "(last crop = %s, active array = %s)",
2391                                         previousCrop, activeArraySize),
2392                             (previousCrop.width() < activeArraySize.width() &&
2393                              previousCrop.height() < activeArraySize.height()));
2394             }
2395         }
2396     }
2397 
2398     private void digitalZoomPreviewCombinationTestByCamera() throws Exception {
2399         final double ASPECT_RATIO_THRESHOLD = 0.001;
2400         List<Double> aspectRatiosTested = new ArrayList<Double>();
2401         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2402         aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight());
2403 
2404         for (Size size : mOrderedPreviewSizes) {
2405             // Max preview size was already tested in testDigitalZoom test. skip it.
2406             if (size.equals(maxPreviewSize)) {
2407                 continue;
2408             }
2409 
2410             // Only test the largest size for each aspect ratio.
2411             double aspectRatio = (double)(size.getWidth()) / size.getHeight();
2412             if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) {
2413                 continue;
2414             }
2415 
2416             if (VERBOSE) {
2417                 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom");
2418             }
2419 
2420             aspectRatiosTested.add(aspectRatio);
2421             digitalZoomTestByCamera(size);
2422         }
2423     }
2424 
2425     private static boolean isAspectRatioContained(List<Double> aspectRatioList,
2426             double aspectRatio, double delta) {
2427         for (Double ratio : aspectRatioList) {
2428             if (Math.abs(ratio - aspectRatio) < delta) {
2429                 return true;
2430             }
2431         }
2432 
2433         return false;
2434     }
2435 
2436     private void sceneModeTestByCamera() throws Exception {
2437         int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked();
2438         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2439         CaptureRequest.Builder requestBuilder =
2440                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2441         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2442         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
2443         startPreview(requestBuilder, maxPreviewSize, listener);
2444 
2445         for(int mode : sceneModes) {
2446             requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode);
2447             listener = new SimpleCaptureCallback();
2448             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2449             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2450 
2451             verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE,
2452                     mode, listener, NUM_FRAMES_VERIFIED);
2453             // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED
2454             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2455                     CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED);
2456         }
2457     }
2458 
2459     private void effectModeTestByCamera() throws Exception {
2460         int[] effectModes = mStaticInfo.getAvailableEffectModesChecked();
2461         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2462         CaptureRequest.Builder requestBuilder =
2463                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2464         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
2465         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2466         startPreview(requestBuilder, maxPreviewSize, listener);
2467 
2468         for(int mode : effectModes) {
2469             requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode);
2470             listener = new SimpleCaptureCallback();
2471             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2472             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2473 
2474             verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE,
2475                     mode, listener, NUM_FRAMES_VERIFIED);
2476             // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED
2477             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2478                     CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED);
2479         }
2480     }
2481 
2482     //----------------------------------------------------------------
2483     //---------Below are common functions for all tests.--------------
2484     //----------------------------------------------------------------
2485 
2486     /**
2487      * Enable exposure manual control and change exposure and sensitivity and
2488      * clamp the value into the supported range.
2489      */
2490     private void changeExposure(CaptureRequest.Builder requestBuilder,
2491             long expTime, int sensitivity) {
2492         // Check if the max analog sensitivity is available and no larger than max sensitivity.
2493         // The max analog sensitivity is not actually used here. This is only an extra sanity check.
2494         mStaticInfo.getMaxAnalogSensitivityChecked();
2495 
2496         expTime = mStaticInfo.getExposureClampToRange(expTime);
2497         sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity);
2498 
2499         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
2500         requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime);
2501         requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
2502     }
2503     /**
2504      * Enable exposure manual control and change exposure time and
2505      * clamp the value into the supported range.
2506      *
2507      * <p>The sensitivity is set to default value.</p>
2508      */
2509     private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) {
2510         changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY);
2511     }
2512 
2513     /**
2514      * Get the exposure time array that contains multiple exposure time steps in
2515      * the exposure time range, in nanoseconds.
2516      */
2517     private long[] getExposureTimeTestValues() {
2518         long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1];
2519         long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS);
2520         long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS);
2521 
2522         long range = maxExpTime - minExpTime;
2523         double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS;
2524         for (int i = 0; i < testValues.length; i++) {
2525             testValues[i] = maxExpTime - (long)(stepSize * i);
2526             testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]);
2527         }
2528 
2529         return testValues;
2530     }
2531 
2532     /**
2533      * Generate test focus distances in range of [0, minFocusDistance] in increasing order.
2534      *
2535      * @param repeatMin number of times minValue will be repeated.
2536      * @param repeatMax number of times maxValue will be repeated.
2537      */
2538     private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) {
2539         int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax;
2540         float[] testValues = new float[totalCount];
2541         float minValue = 0;
2542         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
2543 
2544         float range = maxValue - minValue;
2545         float stepSize = range / NUM_TEST_FOCUS_DISTANCES;
2546 
2547         for (int i = 0; i < repeatMin; i++) {
2548             testValues[i] = minValue;
2549         }
2550         for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) {
2551             testValues[repeatMin+i] = minValue + stepSize * i;
2552         }
2553         for (int i = 0; i < repeatMax; i++) {
2554             testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] =
2555                     maxValue;
2556         }
2557 
2558         return testValues;
2559     }
2560 
2561     /**
2562      * Get the sensitivity array that contains multiple sensitivity steps in the
2563      * sensitivity range.
2564      * <p>
2565      * Sensitivity number of test values is determined by
2566      * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and sensitivity range, and
2567      * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
2568      * </p>
2569      */
2570     private int[] getSensitivityTestValues() {
2571         int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
2572                 DEFAULT_SENSITIVITY);
2573         int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
2574                 DEFAULT_SENSITIVITY);
2575 
2576         int range = maxSensitivity - minSensitivity;
2577         int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
2578         int numSteps = range / stepSize;
2579         // Bound the test steps to avoid supper long test.
2580         if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
2581             numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
2582             stepSize = range / numSteps;
2583         }
2584         int[] testValues = new int[numSteps + 1];
2585         for (int i = 0; i < testValues.length; i++) {
2586             testValues[i] = maxSensitivity - stepSize * i;
2587             testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
2588         }
2589 
2590         return testValues;
2591     }
2592 
2593     /**
2594      * Validate the AE manual control exposure time.
2595      *
2596      * <p>Exposure should be close enough, and only round down if they are not equal.</p>
2597      *
2598      * @param request Request exposure time
2599      * @param result Result exposure time
2600      */
2601     private void validateExposureTime(long request, long result) {
2602         long expTimeDelta = request - result;
2603         long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
2604                 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
2605         // First, round down not up, second, need close enough.
2606         mCollector.expectTrue("Exposture time is invalid for AE manaul control test, request: "
2607                 + request + " result: " + result,
2608                 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
2609     }
2610 
2611     /**
2612      * Validate AE manual control sensitivity.
2613      *
2614      * @param request Request sensitivity
2615      * @param result Result sensitivity
2616      */
2617     private void validateSensitivity(int request, int result) {
2618         float sensitivityDelta = request - result;
2619         float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
2620         // First, round down not up, second, need close enough.
2621         mCollector.expectTrue("Sensitivity is invalid for AE manaul control test, request: "
2622                 + request + " result: " + result,
2623                 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
2624     }
2625 
2626     /**
2627      * Validate frame duration for a given capture.
2628      *
2629      * <p>Frame duration should be longer than exposure time.</p>
2630      *
2631      * @param result The capture result for a given capture
2632      */
2633     private void validateFrameDurationForCapture(CaptureResult result) {
2634         long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2635         long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
2636         if (VERBOSE) {
2637             Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
2638         }
2639 
2640         mCollector.expectTrue(String.format("Frame duration (%d) should be longer than exposure"
2641                 + " time (%d) for a given capture", frameDuration, expTime),
2642                 frameDuration >= expTime);
2643 
2644         validatePipelineDepth(result);
2645     }
2646 
2647     /**
2648      * Basic verification for the control mode capture result.
2649      *
2650      * @param key The capture result key to be verified against
2651      * @param requestMode The request mode for this result
2652      * @param listener The capture listener to get capture results
2653      * @param numFramesVerified The number of capture results to be verified
2654      */
2655     private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode,
2656             SimpleCaptureCallback listener, int numFramesVerified) {
2657         for (int i = 0; i < numFramesVerified; i++) {
2658             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2659             validatePipelineDepth(result);
2660             T resultMode = getValueNotNull(result, key);
2661             if (VERBOSE) {
2662                 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: "
2663                         + resultMode.toString());
2664             }
2665             mCollector.expectEquals("Key " + key.getName() + " result should match request",
2666                     requestMode, resultMode);
2667         }
2668     }
2669 
2670     /**
2671      * Basic verification that the value of a capture result key should be one of the expected
2672      * values.
2673      *
2674      * @param key The capture result key to be verified against
2675      * @param expectedModes The list of any possible expected modes for this result
2676      * @param listener The capture listener to get capture results
2677      * @param numFramesVerified The number of capture results to be verified
2678      */
2679     private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes,
2680             SimpleCaptureCallback listener, int numFramesVerified) {
2681         for (int i = 0; i < numFramesVerified; i++) {
2682             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2683             validatePipelineDepth(result);
2684             T resultMode = getValueNotNull(result, key);
2685             if (VERBOSE) {
2686                 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: "
2687                         + resultMode.toString());
2688             }
2689             // Capture result should be one of the expected values.
2690             mCollector.expectContains(expectedModes, resultMode);
2691         }
2692     }
2693 
2694     /**
2695      * Verify if the fps is slow down for given input request with certain
2696      * controls inside.
2697      * <p>
2698      * This method selects a max preview size for each fps range, and then
2699      * configure the preview stream. Preview is started with the max preview
2700      * size, and then verify if the result frame duration is in the frame
2701      * duration range.
2702      * </p>
2703      *
2704      * @param requestBuilder The request builder that contains post-processing
2705      *            controls that could impact the output frame rate, such as
2706      *            {@link CaptureRequest.NOISE_REDUCTION_MODE}. The value of
2707      *            these controls must be set to some values such that the frame
2708      *            rate is not slow down.
2709      * @param numFramesVerified The number of frames to be verified
2710      * @param fpsRanges The fps ranges to be verified
2711      */
2712     private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
2713             int numFramesVerified, List<Range<Integer>> fpsRanges )  throws Exception {
2714         boolean frameDurationAvailable = true;
2715         // Allow a few frames for AE to settle on target FPS range
2716         final int NUM_FRAME_TO_SKIP = 6;
2717         float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
2718         if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
2719             frameDurationAvailable = false;
2720             // Allow a larger error margin (1.5%) for timestamps
2721             frameDurationErrorMargin = 0.015f;
2722         }
2723         if (mStaticInfo.isExternalCamera()) {
2724             // Allow a even larger error margin (15%) for external camera timestamps
2725             frameDurationErrorMargin = 0.15f;
2726         }
2727 
2728         boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
2729         Range<Integer> fpsRange;
2730         SimpleCaptureCallback resultListener;
2731 
2732         for (int i = 0; i < fpsRanges.size(); i += 1) {
2733             fpsRange = fpsRanges.get(i);
2734             Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
2735             // If unable to find a preview size, then log the failure, and skip this run.
2736             if (previewSz == null) {
2737                 if (mStaticInfo.isCapabilitySupported(
2738                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
2739                     mCollector.addMessage(String.format(
2740                             "Unable to find a preview size supporting given fps range %s",
2741                             fpsRange));
2742                 }
2743                 continue;
2744             }
2745 
2746             if (VERBOSE) {
2747                 Log.v(TAG, String.format("Test fps range %s for preview size %s",
2748                         fpsRange, previewSz.toString()));
2749             }
2750             requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
2751             // Turn off auto antibanding to avoid exposure time and frame duration interference
2752             // from antibanding algorithm.
2753             if (antiBandingOffIsSupported) {
2754                 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
2755                         CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
2756             } else {
2757                 // The device doesn't implement the OFF mode, test continues. It need make sure
2758                 // that the antibanding algorithm doesn't slow down the fps.
2759                 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
2760                         " not slow down the frame rate regardless of its current antibanding" +
2761                         " mode");
2762             }
2763 
2764             resultListener = new SimpleCaptureCallback();
2765             startPreview(requestBuilder, previewSz, resultListener);
2766             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2767             // Wait several more frames for AE to settle on target FPS range
2768             waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);
2769 
2770             long[] frameDurationRange = new long[]{
2771                     (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
2772             long captureTime = 0, prevCaptureTime = 0;
2773             for (int j = 0; j < numFramesVerified; j++) {
2774                 long frameDuration = frameDurationRange[0];
2775                 CaptureResult result =
2776                         resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2777                 validatePipelineDepth(result);
2778                 if (frameDurationAvailable) {
2779                     frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
2780                 } else {
2781                     // if frame duration is not available, check timestamp instead
2782                     captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
2783                     if (j > 0) {
2784                         frameDuration = captureTime - prevCaptureTime;
2785                     }
2786                     prevCaptureTime = captureTime;
2787                 }
2788                 mCollector.expectInRange(
2789                         "Frame duration must be in the range of " +
2790                                 Arrays.toString(frameDurationRange),
2791                         frameDuration,
2792                         (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
2793                         (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
2794             }
2795         }
2796 
2797         stopPreview();
2798     }
2799 
2800     /**
2801      * Validate the pipeline depth result.
2802      *
2803      * @param result The capture result to get pipeline depth data
2804      */
2805     private void validatePipelineDepth(CaptureResult result) {
2806         final byte MIN_PIPELINE_DEPTH = 1;
2807         byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
2808         Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
2809         mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
2810                 MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
2811                 maxPipelineDepth);
2812     }
2813 
2814     /**
2815      * Calculate the anti-flickering corrected exposure time.
2816      * <p>
2817      * If the input exposure time is very short (shorter than flickering
2818      * boundary), which indicate the scene is bright and very likely at outdoor
2819      * environment, skip the correction, as it doesn't make much sense by doing so.
2820      * </p>
2821      * <p>
2822      * For long exposure time (larger than the flickering boundary), find the
2823      * exposure time that is closest to the flickering boundary.
2824      * </p>
2825      *
2826      * @param flickeringMode The flickering mode
2827      * @param exposureTime The input exposureTime to be corrected
2828      * @return anti-flickering corrected exposure time
2829      */
2830     private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
2831         if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
2832             throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
2833         }
2834         long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
2835         if (flickeringMode == ANTI_FLICKERING_60HZ) {
2836             flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
2837         }
2838 
2839         if (exposureTime <= flickeringBoundary) {
2840             return exposureTime;
2841         }
2842 
2843         // Find the closest anti-flickering corrected exposure time
2844         long correctedExpTime = exposureTime + (flickeringBoundary / 2);
2845         correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
2846         return correctedExpTime;
2847     }
2848 
2849     /**
2850      * Update one 3A region in capture request builder if that region is supported. Do nothing
2851      * if the specified 3A region is not supported by camera device.
2852      * @param requestBuilder The request to be updated
2853      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2854      * @param regions The 3A regions to be set
2855      */
2856     private void update3aRegion(
2857             CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions)
2858     {
2859         int maxRegions;
2860         CaptureRequest.Key<MeteringRectangle[]> key;
2861 
2862         if (regions == null || regions.length == 0) {
2863             throw new IllegalArgumentException("Invalid input 3A region!");
2864         }
2865 
2866         switch (algoIdx) {
2867             case INDEX_ALGORITHM_AE:
2868                 maxRegions = mStaticInfo.getAeMaxRegionsChecked();
2869                 key = CaptureRequest.CONTROL_AE_REGIONS;
2870                 break;
2871             case INDEX_ALGORITHM_AWB:
2872                 maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
2873                 key = CaptureRequest.CONTROL_AWB_REGIONS;
2874                 break;
2875             case INDEX_ALGORITHM_AF:
2876                 maxRegions = mStaticInfo.getAfMaxRegionsChecked();
2877                 key = CaptureRequest.CONTROL_AF_REGIONS;
2878                 break;
2879             default:
2880                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2881         }
2882 
2883         if (maxRegions >= regions.length) {
2884             requestBuilder.set(key, regions);
2885         }
2886     }
2887 
2888     /**
2889      * Validate one 3A region in capture result equals to expected region if that region is
2890      * supported. Do nothing if the specified 3A region is not supported by camera device.
2891      * @param result The capture result to be validated
2892      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2893      * @param expectRegions The 3A regions expected in capture result
2894      */
2895     private void validate3aRegion(
2896             CaptureResult result, int algoIdx, MeteringRectangle[] expectRegions)
2897     {
2898         final int maxCorrectionDist = 2;
2899         int maxRegions;
2900         CaptureResult.Key<MeteringRectangle[]> key;
2901         MeteringRectangle[] actualRegion;
2902 
2903         switch (algoIdx) {
2904             case INDEX_ALGORITHM_AE:
2905                 maxRegions = mStaticInfo.getAeMaxRegionsChecked();
2906                 key = CaptureResult.CONTROL_AE_REGIONS;
2907                 break;
2908             case INDEX_ALGORITHM_AWB:
2909                 maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
2910                 key = CaptureResult.CONTROL_AWB_REGIONS;
2911                 break;
2912             case INDEX_ALGORITHM_AF:
2913                 maxRegions = mStaticInfo.getAfMaxRegionsChecked();
2914                 key = CaptureResult.CONTROL_AF_REGIONS;
2915                 break;
2916             default:
2917                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2918         }
2919 
2920         Integer distortionCorrectionMode = result.get(CaptureResult.DISTORTION_CORRECTION_MODE);
2921         boolean correctionEnabled =
2922                 distortionCorrectionMode != null &&
2923                 distortionCorrectionMode != CaptureResult.DISTORTION_CORRECTION_MODE_OFF;
2924 
2925         if (maxRegions > 0)
2926         {
2927             actualRegion = getValueNotNull(result, key);
2928             if (correctionEnabled) {
2929                 for(int i = 0; i < actualRegion.length; i++) {
2930                     Rect a = actualRegion[i].getRect();
2931                     Rect e = expectRegions[i].getRect();
2932                     if (!mCollector.expectLessOrEqual(
2933                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
2934                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2935                         maxCorrectionDist, Math.abs(a.left - e.left))) continue;
2936                     if (!mCollector.expectLessOrEqual(
2937                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
2938                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2939                         maxCorrectionDist, Math.abs(a.right - e.right))) continue;
2940                     if (!mCollector.expectLessOrEqual(
2941                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
2942                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2943                         maxCorrectionDist, Math.abs(a.top - e.top))) continue;
2944                     if (!mCollector.expectLessOrEqual(
2945                         "Expected 3A regions: " + Arrays.toString(expectRegions) +
2946                         " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2947                         maxCorrectionDist, Math.abs(a.bottom - e.bottom))) continue;
2948                 }
2949             } else {
2950                 mCollector.expectEquals(
2951                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2952                     " does not match actual one: " + Arrays.toString(actualRegion),
2953                     expectRegions, actualRegion);
2954             }
2955         }
2956     }
2957 }
2958