1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.*;
20 import static android.hardware.camera2.CameraCharacteristics.*;
21 
22 import android.graphics.Point;
23 import android.graphics.PointF;
24 import android.graphics.Rect;
25 import android.hardware.camera2.CameraCharacteristics;
26 import android.hardware.camera2.CameraDevice;
27 import android.hardware.camera2.CameraMetadata;
28 import android.hardware.camera2.CaptureRequest;
29 import android.hardware.camera2.CaptureResult;
30 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
31 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
32 import android.hardware.camera2.params.BlackLevelPattern;
33 import android.hardware.camera2.params.ColorSpaceTransform;
34 import android.hardware.camera2.params.Face;
35 import android.hardware.camera2.params.LensShadingMap;
36 import android.hardware.camera2.params.MeteringRectangle;
37 import android.hardware.camera2.params.RggbChannelVector;
38 import android.hardware.camera2.params.TonemapCurve;
39 import android.media.Image;
40 import android.util.Log;
41 import android.util.Range;
42 import android.util.Rational;
43 import android.util.Size;
44 
45 import java.nio.ByteBuffer;
46 import java.util.ArrayList;
47 import java.util.Arrays;
48 import java.util.List;
49 
50 /**
51  * <p>
52  * Basic test for camera CaptureRequest key controls.
53  * </p>
54  * <p>
55  * Several test categories are covered: manual sensor control, 3A control,
56  * manual ISP control, and other per-frame controls and synchronization.
57  * </p>
58  */
59 public class CaptureRequestTest extends Camera2SurfaceViewTestCase {
60     private static final String TAG = "CaptureRequestTest";
61     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
62     private static final int NUM_FRAMES_VERIFIED = 15;
63     private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60;
64     /** 30ms exposure time must be supported by full capability devices. */
65     private static final long DEFAULT_EXP_TIME_NS = 30000000L;
66     private static final int DEFAULT_SENSITIVITY = 100;
67     private static final int RGGB_COLOR_CHANNEL_COUNT = 4;
68     private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT;
69     private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT;
70     private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L;
71     private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms
72     private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation.
73     private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
74     private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
75     private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
76     private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
77     private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 16;
78     private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
79     private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
80     private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
81     private static final int NUM_TEST_FOCUS_DISTANCES = 10;
82     private static final int NUM_FOCUS_DISTANCES_REPEAT = 3;
83     // 5 percent error margin for calibrated device
84     private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f;
85     // 25 percent error margin for uncalibrated device
86     private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f;
87     // 10 percent error margin for approximate device
88     private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f;
89     private static final int ANTI_FLICKERING_50HZ = 1;
90     private static final int ANTI_FLICKERING_60HZ = 2;
91     // 5 percent error margin for resulting crop regions
92     private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f;
93     // 1 percent error margin for centering the crop region
94     private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f;
95     private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f;
96     private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f;
97 
98     // Linear tone mapping curve example.
99     private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f};
100     // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points.
101     private static final float[] TONEMAP_CURVE_SRGB = {
102             0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
103             0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
104             0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
105             0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
106     };
107     private final Rational ZERO_R = new Rational(0, 1);
108     private final Rational ONE_R = new Rational(1, 1);
109 
110     private final int NUM_ALGORITHMS = 3; // AE, AWB and AF
111     private final int INDEX_ALGORITHM_AE = 0;
112     private final int INDEX_ALGORITHM_AWB = 1;
113     private final int INDEX_ALGORITHM_AF = 2;
114 
115     private enum TorchSeqState {
116         RAMPING_UP,
117         FIRED,
118         RAMPING_DOWN
119     }
120 
121     @Override
122     protected void setUp() throws Exception {
123         super.setUp();
124     }
125 
126     @Override
127     protected void tearDown() throws Exception {
128         super.tearDown();
129     }
130 
131     /**
132      * Test black level lock when the exposure value changes.
133      * <p>
134      * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
135      * camera device should lock the black level. Because changes to certain capture
136      * parameters (such as exposure time) may require resetting black level compensation,
137      * the camera device may momentarily reset the black level when the exposure values
138      * change. However, the black level must remain locked once the change has settled
139      * (as long as requests keep the lock ON).
140      * </p>
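     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder} created from a preview template):
     * </p>
     * <pre>{@code
     * builder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
     * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 30000000L); // 30ms manual exposure
     * builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);         // ISO 100
     * }</pre>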
141      */
142     public void testBlackLevelLock() throws Exception {
143         for (int i = 0; i < mCameraIds.length; i++) {
144             try {
145                 openDevice(mCameraIds[i]);
146 
147                 if (!mStaticInfo.isCapabilitySupported(
148                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
149                     continue;
150                 }
151 
152                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
153                 CaptureRequest.Builder requestBuilder =
154                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
155 
156                 // Start with default manual exposure time, with black level being locked.
157                 requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
158                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
159 
160                 Size previewSz =
161                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
162                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
163 
164                 startPreview(requestBuilder, previewSz, listener);
165                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
166                 // No lock OFF state is allowed as the exposure is not changed.
167                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);
168 
169                 // Double the exposure time and gain, with black level still being locked.
170                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
171                 listener = new SimpleCaptureCallback();
172                 startPreview(requestBuilder, previewSz, listener);
173                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
174                 // Allow at most one lock OFF state as the exposure is changed once.
175                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);
176 
177                 stopPreview();
178             } finally {
179                 closeDevice();
180             }
181         }
182     }
183 
184     /**
185      * Test dynamic black/white levels if they are supported.
186      *
187      * <p>
188      * If the dynamic black and white levels are reported, run the checks below:
189      *   1. the dynamic black and white levels shouldn't deviate from the global value too much
190      *   for different sensitivities.
191      *   2. If the RAW_SENSOR and optical black regions are supported, capture RAW images and
192      *   calculate the optical black level values. The reported dynamic black level should be
193      *   close enough to the optical black level values.
194      * </p>
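     * <p>
     * Illustrative sketch only (assumes a {@code CaptureResult result} from a completed
     * capture); these are the result keys this test reads back:
     * </p>
     * <pre>{@code
     * float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
     * Integer dynamicWhite = result.get(CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
     * }</pre>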
195      */
196     public void testDynamicBlackWhiteLevel() throws Exception {
197         for (String id : mCameraIds) {
198             try {
199                 openDevice(id);
200                 if (!mStaticInfo.isDynamicBlackLevelSupported()) {
201                     continue;
202                 }
203                 dynamicBlackWhiteLevelTestByCamera();
204             } finally {
205                 closeDevice();
206             }
207         }
208     }
209 
210     /**
211      * Basic lens shading map request test.
212      * <p>
213      * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
214      * be applied by the camera device, and an identity lens shading map
215      * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
216      * </p>
217      * <p>
218      * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
219      * will be applied by the camera device. The lens shading map data can be
220      * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
221      * </p>
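     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder} and a {@code CaptureResult result}):
     * </p>
     * <pre>{@code
     * builder.set(CaptureRequest.SHADING_MODE, CameraMetadata.SHADING_MODE_OFF);
     * builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
     *         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
     * // In the capture callback, the per-channel map can then be read back:
     * LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
     * }</pre>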
222      */
223     public void testLensShadingMap() throws Exception {
224         for (int i = 0; i < mCameraIds.length; i++) {
225             try {
226                 openDevice(mCameraIds[i]);
227 
228                 if (!mStaticInfo.isManualLensShadingMapSupported()) {
229                     Log.i(TAG, "Camera " + mCameraIds[i] +
230                             " doesn't support lens shading controls, skipping test");
231                     continue;
232                 }
233 
234                 List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
235                         mStaticInfo.getAvailableLensShadingMapModesChecked()));
236 
237                 if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
238                     continue;
239                 }
240 
241                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
242                 CaptureRequest.Builder requestBuilder =
243                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
244                 requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
245                         STATISTICS_LENS_SHADING_MAP_MODE_ON);
246 
247                 Size previewSz =
248                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
249                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
250                 List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
251                         mStaticInfo.getAvailableLensShadingModesChecked()));
252 
253                 // Shading map mode OFF, lensShadingMapMode ON, camera device
254                 // should output unity maps.
255                 if (lensShadingModes.contains(SHADING_MODE_OFF)) {
256                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
257                     listener = new SimpleCaptureCallback();
258                     startPreview(requestBuilder, previewSz, listener);
259                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
260                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
261                 }
262 
263                 // Shading map mode FAST, lensShadingMapMode ON, camera device
264                 // should output valid maps.
265                 if (lensShadingModes.contains(SHADING_MODE_FAST)) {
266                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);
267 
268                     listener = new SimpleCaptureCallback();
269                     startPreview(requestBuilder, previewSz, listener);
270                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
272                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
273                 }
274 
275                 // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
276                 // should output valid maps.
277                 if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
278                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);
279 
280                     listener = new SimpleCaptureCallback();
281                     startPreview(requestBuilder, previewSz, listener);
282                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
283                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
284                 }
285 
286                 stopPreview();
287             } finally {
288                 closeDevice();
289             }
290         }
291     }
292 
293     /**
294      * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
295      * <p>
296      * Test all available anti-banding modes, check if the exposure time adjustment is
297      * correct.
298      * </p>
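     * <p>
     * Sketch of the kind of check applied, under the assumption of a 50Hz environment and a
     * hypothetical {@code exposureTimeNs} result value (the real verification lives in
     * {@code antiBandingTestByMode}): exposures at or above the 10ms boundary should land
     * close to a multiple of the 10ms flicker period.
     * </p>
     * <pre>{@code
     * long period = EXPOSURE_TIME_BOUNDARY_50HZ_NS; // 10ms
     * long remainder = exposureTimeNs % period;
     * boolean nearMultiple = remainder < EXPOSURE_TIME_ERROR_MARGIN_NS
     *         || (period - remainder) < EXPOSURE_TIME_ERROR_MARGIN_NS;
     * }</pre>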
299      */
300     public void testAntiBandingModes() throws Exception {
301         for (int i = 0; i < mCameraIds.length; i++) {
302             try {
303                 openDevice(mCameraIds[i]);
304 
305                 // Without manual sensor control, exposure time cannot be verified
306                 if (!mStaticInfo.isCapabilitySupported(
307                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
308                     continue;
309                 }
310 
311                 int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
312 
313                 Size previewSz =
314                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
315                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
316 
317                 for (int mode : modes) {
318                     antiBandingTestByMode(previewSz, mode);
319                 }
320             } finally {
321                 closeDevice();
322             }
323         }
324 
325     }
326 
327     /**
328      * Test AE mode and lock.
329      *
330      * <p>
331      * For AE lock: while AE is locked, the exposure parameters shouldn't change.
332      * For AE modes: each mode should satisfy the per-frame control behavior defined
333      * in the API specification.
334      * </p>
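     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder}):
     * </p>
     * <pre>{@code
     * builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
     * builder.set(CaptureRequest.CONTROL_AE_LOCK, true);
     * }</pre>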
335      */
336     public void testAeModeAndLock() throws Exception {
337         for (int i = 0; i < mCameraIds.length; i++) {
338             try {
339                 openDevice(mCameraIds[i]);
340                 if (!mStaticInfo.isColorOutputSupported()) {
341                     Log.i(TAG, "Camera " + mCameraIds[i] +
342                             " does not support color outputs, skipping");
343                     continue;
344                 }
345 
346                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
347 
348                 // Update preview surface with given size for all sub-tests.
349                 updatePreviewSurface(maxPreviewSz);
350 
351                 // Test aeMode and lock
352                 int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
353                 for (int mode : aeModes) {
354                     aeModeAndLockTestByMode(mode);
355                 }
356             } finally {
357                 closeDevice();
358             }
359         }
360     }
361 
362     /** Test {@link CaptureRequest#FLASH_MODE} control.
363      * <p>
364      * For each available {@link CaptureRequest#FLASH_MODE} value, test the flash control
365      * and the {@link CaptureResult#FLASH_STATE} result.
366      * </p>
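     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder}); flash mode is only honored when AE is ON or OFF:
     * </p>
     * <pre>{@code
     * builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
     * builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
     * }</pre>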
367      */
368     public void testFlashControl() throws Exception {
369         for (int i = 0; i < mCameraIds.length; i++) {
370             try {
371                 openDevice(mCameraIds[i]);
372                 if (!mStaticInfo.isColorOutputSupported()) {
373                     Log.i(TAG, "Camera " + mCameraIds[i] +
374                             " does not support color outputs, skipping");
375                     continue;
376                 }
377 
378                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
379                 CaptureRequest.Builder requestBuilder =
380                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
381 
382                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
383 
384                 startPreview(requestBuilder, maxPreviewSz, listener);
385 
386                 // Flash control can only be used when the AE mode is ON or OFF.
387                 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);
388 
389                 // LEGACY won't support AE mode OFF
390                 boolean aeOffModeSupported = false;
391                 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
392                     if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
393                         aeOffModeSupported = true;
394                     }
395                 }
396                 if (aeOffModeSupported) {
397                     flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
398                 }
399 
400                 stopPreview();
401             } finally {
402                 closeDevice();
403             }
404         }
405     }
406 
407     /**
408      * Test face detection modes and results.
409      */
410     public void testFaceDetection() throws Exception {
411         for (int i = 0; i < mCameraIds.length; i++) {
412             try {
413                 openDevice(mCameraIds[i]);
414                 if (!mStaticInfo.isColorOutputSupported()) {
415                     Log.i(TAG, "Camera " + mCameraIds[i] +
416                             " does not support color outputs, skipping");
417                     continue;
418                 }
419                 faceDetectionTestByCamera();
420             } finally {
421                 closeDevice();
422             }
423         }
424     }
425 
426     /**
427      * Test tone map modes and controls.
428      */
429     public void testToneMapControl() throws Exception {
430         for (String id : mCameraIds) {
431             try {
432                 openDevice(id);
433                 if (!mStaticInfo.isManualToneMapSupported()) {
434                     Log.i(TAG, "Camera " + id +
435                             " doesn't support tone mapping controls, skipping test");
436                     continue;
437                 }
438                 toneMapTestByCamera();
439             } finally {
440                 closeDevice();
441             }
442         }
443     }
444 
445     /**
446      * Test color correction modes and controls.
447      */
448     public void testColorCorrectionControl() throws Exception {
449         for (String id : mCameraIds) {
450             try {
451                 openDevice(id);
452                 if (!mStaticInfo.isColorCorrectionSupported()) {
453                     Log.i(TAG, "Camera " + id +
454                             " doesn't support color correction controls, skipping test");
455                     continue;
456                 }
457                 colorCorrectionTestByCamera();
458             } finally {
459                 closeDevice();
460             }
461         }
462     }
463 
464     public void testEdgeModeControl() throws Exception {
465         for (String id : mCameraIds) {
466             try {
467                 openDevice(id);
468                 if (!mStaticInfo.isEdgeModeControlSupported()) {
469                     Log.i(TAG, "Camera " + id +
470                             " doesn't support EDGE_MODE controls, skipping test");
471                     continue;
472                 }
473 
474                 edgeModesTestByCamera();
475             } finally {
476                 closeDevice();
477             }
478         }
479     }
480 
481     /**
482      * Test focus distance control.
483      */
484     public void testFocusDistanceControl() throws Exception {
485         for (String id : mCameraIds) {
486             try {
487                 openDevice(id);
488                 if (!mStaticInfo.hasFocuser()) {
489                     Log.i(TAG, "Camera " + id + " has no focuser, skipping test");
490                     continue;
491                 }
492 
493                 if (!mStaticInfo.isCapabilitySupported(
494                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
495                     Log.i(TAG, "Camera " + id +
496                             " does not support MANUAL_SENSOR, skipping test");
497                     continue;
498                 }
499 
500                 focusDistanceTestByCamera();
501             } finally {
502                 closeDevice();
503             }
504         }
505     }
506 
507     public void testNoiseReductionModeControl() throws Exception {
508         for (String id : mCameraIds) {
509             try {
510                 openDevice(id);
511                 if (!mStaticInfo.isNoiseReductionModeControlSupported()) {
512                     Log.i(TAG, "Camera " + id +
513                             " doesn't support noise reduction mode, skipping test");
514                     continue;
515                 }
516 
517                 noiseReductionModeTestByCamera();
518             } finally {
519                 closeDevice();
520             }
521         }
522     }
523 
524     /**
525      * Test AWB lock control.
526      *
527      * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p>
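     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder}):
     * </p>
     * <pre>{@code
     * builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
     * builder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
     * }</pre>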
528      */
529     public void testAwbModeAndLock() throws Exception {
530         for (String id : mCameraIds) {
531             try {
532                 openDevice(id);
533                 if (!mStaticInfo.isColorOutputSupported()) {
534                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
535                     continue;
536                 }
537                 awbModeAndLockTestByCamera();
538             } finally {
539                 closeDevice();
540             }
541         }
542     }
543 
544     /**
545      * Test different AF modes.
546      */
547     public void testAfModes() throws Exception {
548         for (String id : mCameraIds) {
549             try {
550                 openDevice(id);
551                 if (!mStaticInfo.isColorOutputSupported()) {
552                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
553                     continue;
554                 }
555                 afModeTestByCamera();
556             } finally {
557                 closeDevice();
558             }
559         }
560     }
561 
562     /**
563      * Test video and optical stabilizations.
564      */
565     public void testCameraStabilizations() throws Exception {
566         for (String id : mCameraIds) {
567             try {
568                 openDevice(id);
569                 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys();
570                 if (!(keys.contains(
571                         CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ||
572                         keys.contains(
573                                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) {
574                     Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes");
575                     continue;
576                 }
577                 if (!mStaticInfo.isColorOutputSupported()) {
578                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
579                     continue;
580                 }
581                 stabilizationTestByCamera();
582             } finally {
583                 closeDevice();
584             }
585         }
586     }
587 
588     /**
589      * Test digital zoom (both centered and non-centered), and validate the returned crop regions.
590      * The max preview size is used for each camera.
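     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder} and the active array rectangle
     * {@code Rect active}); a centered 2x zoom crops to half the width and height:
     * </p>
     * <pre>{@code
     * Rect crop = new Rect(active.width() / 4, active.height() / 4,
     *         active.width() * 3 / 4, active.height() * 3 / 4);
     * builder.set(CaptureRequest.SCALER_CROP_REGION, crop);
     * }</pre>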
591      */
592     public void testDigitalZoom() throws Exception {
593         for (String id : mCameraIds) {
594             try {
595                 openDevice(id);
596                 if (!mStaticInfo.isColorOutputSupported()) {
597                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
598                     continue;
599                 }
600                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
601                 digitalZoomTestByCamera(maxPreviewSize);
602             } finally {
603                 closeDevice();
604             }
605         }
606     }
607 
608     /**
609      * Test digital zoom and all preview size combinations.
610      * TODO: this and the above test should be moved to the preview test class.
611      */
612     public void testDigitalZoomPreviewCombinations() throws Exception {
613         for (String id : mCameraIds) {
614             try {
615                 openDevice(id);
616                 if (!mStaticInfo.isColorOutputSupported()) {
617                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
618                     continue;
619                 }
620                 digitalZoomPreviewCombinationTestByCamera();
621             } finally {
622                 closeDevice();
623             }
624         }
625     }
626 
627     /**
628      * Test scene mode controls.
629      */
630     public void testSceneModes() throws Exception {
631         for (String id : mCameraIds) {
632             try {
633                 openDevice(id);
634                 if (mStaticInfo.isSceneModeSupported()) {
635                     sceneModeTestByCamera();
636                 }
637             } finally {
638                 closeDevice();
639             }
640         }
641     }
642 
643     /**
644      * Test effect mode controls.
645      */
646     public void testEffectModes() throws Exception {
647         for (String id : mCameraIds) {
648             try {
649                 openDevice(id);
650                 if (!mStaticInfo.isColorOutputSupported()) {
651                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
652                     continue;
653                 }
654                 effectModeTestByCamera();
655             } finally {
656                 closeDevice();
657             }
658         }
659     }
660 
661     // TODO: add 3A state machine test.
662 
663     /**
664      * Per-camera dynamic black and white level test.
665      */
666     private void dynamicBlackWhiteLevelTestByCamera() throws Exception {
667         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
668         SimpleImageReaderListener imageListener = null;
669         CaptureRequest.Builder previewBuilder =
670                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
671         CaptureRequest.Builder rawBuilder = null;
672         Size previewSize =
673                 getMaxPreviewSize(mCamera.getId(), mCameraManager,
674                 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
675         Size rawSize = null;
676         boolean canCaptureBlackRaw =
677                 mStaticInfo.isCapabilitySupported(
678                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) &&
679                 mStaticInfo.isOpticalBlackRegionSupported();
680         if (canCaptureBlackRaw) {
681             // Capture RAW16, calculate the optical black level from the optical black regions,
682             // and check it against the reported dynamic black level.
683             rawBuilder =
684                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
685             rawSize = mStaticInfo.getRawDimensChecked();
686             imageListener = new SimpleImageReaderListener();
687             prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize,
688                     resultListener, imageListener);
689         } else {
690             startPreview(previewBuilder, previewSize, resultListener);
691         }
692 
693         // Capture a sequence of frames with different sensitivities and validate the black/white
694         // level values
695         int[] sensitivities = getSensitivityTestValues();
696         float[][] dynamicBlackLevels = new float[sensitivities.length][];
697         int[] dynamicWhiteLevels = new int[sensitivities.length];
698         float[][] opticalBlackLevels = new float[sensitivities.length][];
699         for (int i = 0; i < sensitivities.length; i++) {
700             CaptureResult result = null;
701             if (canCaptureBlackRaw) {
702                 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
703                 CaptureRequest rawRequest = rawBuilder.build();
704                 mSession.capture(rawRequest, resultListener, mHandler);
705                 result = resultListener.getCaptureResultForRequest(rawRequest,
706                         NUM_RESULTS_WAIT_TIMEOUT);
707                 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
708 
709                 // Get max (area-wise) optical black region
710                 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get(
711                         CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
712                 Rect maxRegion = opticalBlackRegions[0];
713                 for (Rect region : opticalBlackRegions) {
714                     if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) {
715                         maxRegion = region;
716                     }
717                 }
718 
719                 // Get average black pixel values in the region (region is multiple of 2x2)
720                 Image.Plane rawPlane = rawImage.getPlanes()[0];
721                 ByteBuffer rawBuffer = rawPlane.getBuffer();
722                 float[] avgBlackLevels = {0, 0, 0, 0};
723                 final int rowSize = rawPlane.getRowStride();
724                 final int bytePerPixel = rawPlane.getPixelStride();
725                 if (VERBOSE) {
726                     Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " +
727                             rawPlane.getRowStride());
728                 }
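                // Walk the region in 2x2 steps; each step accumulates the four RAW16 samples of
                // one Bayer quad, addressed via the plane's row and pixel strides.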
729                 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) {
730                     for (int col = maxRegion.left; col < maxRegion.right; col += 2) {
731                         int startOffset = row * rowSize + col * bytePerPixel;
732                         avgBlackLevels[0] += rawBuffer.getShort(startOffset);
733                         avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel);
734                         startOffset += rowSize;
735                         avgBlackLevels[2] += rawBuffer.getShort(startOffset);
736                         avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel);
737                     }
738                 }
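                // Normalize the per-channel sums by the number of sampled 2x2 blocks to get
                // average black levels per Bayer channel.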
739                 int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2);
740                 for (int m = 0; m < avgBlackLevels.length; m++) {
741                     avgBlackLevels[m] /= numBlackBlocks;
742                 }
743                 opticalBlackLevels[i] = avgBlackLevels;
744 
745                 if (VERBOSE) {
746                     Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s",
747                             sensitivities[i], Arrays.toString(avgBlackLevels)));
748                 }
749 
750                 rawImage.close();
751             } else {
752                 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
753                 CaptureRequest previewRequest = previewBuilder.build();
754                 mSession.capture(previewRequest, resultListener, mHandler);
755                 result = resultListener.getCaptureResultForRequest(previewRequest,
756                         NUM_RESULTS_WAIT_TIMEOUT);
757             }
758 
759             dynamicBlackLevels[i] = getValueNotNull(result,
760                     CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
761             dynamicWhiteLevels[i] = getValueNotNull(result,
762                     CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
763         }
764 
765         if (VERBOSE) {
766             Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities));
767             Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels));
768             Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels));
769             if (canCaptureBlackRaw) {
770                 Log.v(TAG, "Optical black level results " +
771                         Arrays.deepToString(opticalBlackLevels));
772             }
773         }
774 
775         // Check the dynamic black level against the global (fixed) black level.
776         // Implicit guarantee: if the dynamic black level is supported, fixed black level must be
777         // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions).
778         BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get(
779                 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
780         int[] fixedBlackLevels = new int[4];
781         int fixedWhiteLevel = mStaticInfo.getCharacteristics().get(
782                 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
783         blackPattern.copyTo(fixedBlackLevels, 0);
784         float maxBlackDeviation = 0;
785         int maxWhiteDeviation = 0;
786         for (int i = 0; i < dynamicBlackLevels.length; i++) {
787             for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
788                 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) {
789                     maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]);
790                 }
791             }
792             if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) {
793                 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel);
794             }
795         }
796         mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level"
797                 + " exceeds the threshold."
798                 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels),
799                 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation);
800         mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceeds the threshold."
801                 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels),
802                 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN,
803                 (float)maxWhiteDeviation);
804 
805         // Validate against the optical black levels if they are available.
806         if (canCaptureBlackRaw) {
807             maxBlackDeviation = 0;
808             for (int i = 0; i < dynamicBlackLevels.length; i++) {
809                 for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
810                     if (maxBlackDeviation <
811                             Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) {
812                         maxBlackDeviation =
813                                 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]);
814                     }
815                 }
816             }
817 
818             mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black"
819                     + " exceeds the threshold."
820                     + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)
821                     + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels),
822                     fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN,
823                     maxBlackDeviation);
824         }
825     }
826 
827     private void noiseReductionModeTestByCamera() throws Exception {
828         Size maxPrevSize = mOrderedPreviewSizes.get(0);
829         CaptureRequest.Builder requestBuilder =
830                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
831         int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked();
832         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
833         startPreview(requestBuilder, maxPrevSize, resultListener);
834 
835         for (int mode : availableModes) {
836             requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode);
837             resultListener = new SimpleCaptureCallback();
838             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
839             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
840 
841             verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode,
842                     resultListener, NUM_FRAMES_VERIFIED);
843 
844             // Verify that OFF and FAST modes do not slow down the frame rate.
845             if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF ||
846                     mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) {
847                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED);
848             }
849         }
850 
851         stopPreview();
852     }
853 
854     private void focusDistanceTestByCamera() throws Exception {
855         CaptureRequest.Builder requestBuilder =
856                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
857         requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
858         int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked();
859         float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED;
860         if (calibrationStatus ==
861                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) {
862             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED;
863         } else if (calibrationStatus ==
864                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) {
865             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE;
866         }
867 
868         // Test changing focus distance with repeating request
869         focusDistanceTestRepeating(requestBuilder, errorMargin);
870 
871         if (calibrationStatus ==
872                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED)  {
873             // Test changing focus distance with burst request
874             focusDistanceTestBurst(requestBuilder, errorMargin);
875         }
876     }
877 
878     private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder,
879             float errorMargin) throws Exception {
880         CaptureRequest request;
881         float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0);
882         Size maxPrevSize = mOrderedPreviewSizes.get(0);
883         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
884         startPreview(requestBuilder, maxPrevSize, resultListener);
885 
886         float[] resultDistances = new float[testDistances.length];
887         int[] resultLensStates = new int[testDistances.length];
888 
889         // Collect results
890         for (int i = 0; i < testDistances.length; i++) {
891             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
892             request = requestBuilder.build();
893             resultListener = new SimpleCaptureCallback();
894             mSession.setRepeatingRequest(request, resultListener, mHandler);
895             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
896             waitForResultValue(resultListener, CaptureResult.LENS_STATE,
897                     CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
898             CaptureResult result = resultListener.getCaptureResultForRequest(request,
899                     NUM_RESULTS_WAIT_TIMEOUT);
900 
901             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
902             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
903 
904             if (VERBOSE) {
905                 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i]
906                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
907             }
908         }
909 
910         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
911                 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0,
912                 errorMargin);
913 
914         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
915 
916             // Test hyperfocal distance optionally
917             float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
918             if (hyperFocalDistance > 0) {
919                 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance);
920                 request = requestBuilder.build();
921                 resultListener = new SimpleCaptureCallback();
922                 mSession.setRepeatingRequest(request, resultListener, mHandler);
923                 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
924 
925                 // Then wait for the lens.state to be stationary.
926                 waitForResultValue(resultListener, CaptureResult.LENS_STATE,
927                         CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
928                 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
929                 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
930                 mCollector.expectInRange("Focus distance for hyperfocal should be close enough to" +
931                         " requested value", focusDistance,
932                         hyperFocalDistance * (1.0f - errorMargin),
933                         hyperFocalDistance * (1.0f + errorMargin));
934             }
935         }
936     }
937 
938     private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder,
939             float errorMargin) throws Exception {
940 
941         Size maxPrevSize = mOrderedPreviewSizes.get(0);
942         float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT,
943                 NUM_FOCUS_DISTANCES_REPEAT);
944         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
945         startPreview(requestBuilder, maxPrevSize, resultListener);
946 
947         float[] resultDistances = new float[testDistances.length];
948         int[] resultLensStates = new int[testDistances.length];
949 
950         final int maxPipelineDepth = mStaticInfo.getCharacteristics().get(
951             CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
952 
953         // Move lens to starting position, and wait for the lens.state to be stationary.
954         CaptureRequest request;
955         requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]);
956         request = requestBuilder.build();
957         mSession.setRepeatingRequest(request, resultListener, mHandler);
958         waitForResultValue(resultListener, CaptureResult.LENS_STATE,
959                 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
960 
961         // Submit burst of requests with different focus distances
962         List<CaptureRequest> burst = new ArrayList<>();
963         for (int i = 0; i < testDistances.length; i ++) {
964             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
965             burst.add(requestBuilder.build());
966         }
967         mSession.captureBurst(burst, resultListener, mHandler);
968 
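        // A burst result may be delayed by up to the request pipeline depth, so allow up to
        // maxPipelineDepth + 1 results to arrive before matching each request.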
969         for (int i = 0; i < testDistances.length; i++) {
970             CaptureResult result = resultListener.getCaptureResultForRequest(
971                     burst.get(i), maxPipelineDepth+1);
972 
973             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
974             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
975 
976             if (VERBOSE) {
977                 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i]
978                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
979             }
980         }
981 
982         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
983                 /*ascendingOrder*/true, /*noOvershoot*/true,
984                 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT,
985                 errorMargin);
986 
987     }
988 
989     /**
990      * Verify focus distance control.
991      *
992      * Assumption:
993      * - First repeatStart+1 elements of requestedDistances share the same value
994      * - Last repeatEnd+1 elements of requestedDistances share the same value
995      * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder.
996      * - Focuser is at requestedDistances[0] at the beginning of the test.
997      *
998      * @param requestedDistances The requested focus distances
999      * @param resultDistances The result focus distances
1000      * @param lensStates The result lens states
1001      * @param ascendingOrder The order of the expected focus distance request/output
1002      * @param noOvershoot Assert that focus control doesn't overshoot the requested value
1003      * @param repeatStart The number of times the starting focus distance is repeated
1004      * @param repeatEnd The number of times the ending focus distance is repeated
1005      * @param errorMargin The error margin between request and result
1006      */
1007     private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances,
1008             int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart,
1009             int repeatEnd, float errorMargin) {
1010 
1011         float minValue = 0;
1012         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
1013         float hyperfocalDistance = 0;
1014         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1015             hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1016         }
1017 
1018         // Verify lens and focus distance do not change for first repeatStart
1019         // results.
1020         for (int i = 0; i < repeatStart; i ++) {
1021             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1022             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1023             float marginMax =
1024                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1025 
1026             mCollector.expectEquals("Lens moves even though focus_distance didn't change",
1027                     lensStates[i], CaptureResult.LENS_STATE_STATIONARY);
1028             if (noOvershoot) {
1029                 mCollector.expectInRange("Focus distance in result should be close enough to " +
1030                         "requested value", resultDistances[i], marginMin, marginMax);
1031             }
1032             mCollector.expectInRange("Result focus distance is out of range",
1033                     resultDistances[i], minValue, maxValue);
1034         }
1035 
1036         for (int i = repeatStart; i < resultDistances.length-1; i ++) {
1037             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1038             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1039             float marginMax =
1040                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1041             if (noOvershoot) {
1042                 // Result focus distance shouldn't overshoot the request
1043                 boolean condition;
1044                 if (ascendingOrder) {
1045                     condition = resultDistances[i] <= marginMax;
1046                } else {
1047                     condition = resultDistances[i] >= marginMin;
1048                 }
1049                 mCollector.expectTrue(String.format(
1050                       "Lens shouldn't move past the requested focus distance. result " +
1051                       resultDistances[i] + " vs target of " +
1052                       (ascendingOrder ? marginMax : marginMin)), condition);
1053             }
1054 
1055             // Verify that the result focus distances change monotonically in the requested direction.
1056             boolean condition;
1057             float compareDistance = resultDistances[i+1] - resultDistances[i];
1058             if (i < resultDistances.length-1-repeatEnd) {
1059                 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0);
1060             } else {
1061                 condition = (ascendingOrder ? compareDistance >= 0 : compareDistance <= 0);
1062             }
1063             mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
1064                   + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
1065                   + lensStates[i+1] + "] monotonicity is broken"), condition);
1066         }
1067 
1068         mCollector.expectTrue(String.format("First and last result focus distances are equal: " +
1069                 resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
1070                 resultDistances[0] != resultDistances[resultDistances.length-1]);
1071 
1072         // Verify lens moved to destination location.
1073         mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
1074                 " for minFocusDistance should be close enough to the requested value " +
1075                 requestedDistances[requestedDistances.length-1],
1076                 resultDistances[resultDistances.length-1],
1077                 requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
1078                 requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
1079     }
1080 
1081     /**
1082      * Verify edge mode control results.
1083      */
1084     private void edgeModesTestByCamera() throws Exception {
1085         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1086         int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
1087         CaptureRequest.Builder requestBuilder =
1088                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1089         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1090         startPreview(requestBuilder, maxPrevSize, resultListener);
1091 
1092         for (int mode : edgeModes) {
1093             requestBuilder.set(CaptureRequest.EDGE_MODE, mode);
1094             resultListener = new SimpleCaptureCallback();
1095             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1096             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1097 
1098             verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
1099                     NUM_FRAMES_VERIFIED);
1100 
1101             // Verify that OFF and FAST modes do not slow down the frame rate.
1102             if (mode == CaptureRequest.EDGE_MODE_OFF ||
1103                     mode == CaptureRequest.EDGE_MODE_FAST) {
1104                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED);
1105             }
1106         }
1107 
1108         stopPreview();
1109     }
1110 
1111     /**
1112      * Test color correction controls.
1113      *
1114      * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
1115      * the unit gain and identity transform.</p>
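     * <p>
     * Illustrative sketch only, not part of the verified flow (assumes a
     * {@code CaptureRequest.Builder builder}); unit gains with an identity transform should
     * leave the output colors unchanged:
     * </p>
     * <pre>{@code
     * builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
     *         CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
     * builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
     *         new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f));
     * }</pre>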
1116      */
1117     private void colorCorrectionTestByCamera() throws Exception {
1118         CaptureRequest request;
1119         CaptureResult result;
1120         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1121         updatePreviewSurface(maxPreviewSz);
1122         CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
1123         CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
1124         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1125 
1126         startPreview(previewRequestBuilder, maxPreviewSz, listener);
1127 
1128         // Default preview result should give valid color correction metadata.
1129         result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1130         validateColorCorrectionResult(result,
1131                 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
1132         int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
1133         // TRANSFORM_MATRIX mode
1134         // Only test unit gain and identity transform
1135         List<Integer> availableControlModes = Arrays.asList(
1136                 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked()));
1137         List<Integer> availableAwbModes = Arrays.asList(
1138                 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked()));
1139         boolean isManualCCSupported =
1140                 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) ||
1141                 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF);
1142         if (isManualCCSupported) {
1143             if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) {
1144                 // Only manual AWB mode is supported
1145                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1146                         CaptureRequest.CONTROL_MODE_AUTO);
1147                 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1148                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1149             } else {
1150                 // All 3A manual controls are supported, so it doesn't matter what we set for AWB mode.
1151                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1152                         CaptureRequest.CONTROL_MODE_OFF);
1153             }
1154 
1155             RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
1156 
1157             ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
1158                 new Rational[] {
1159                     ONE_R, ZERO_R, ZERO_R,
1160                     ZERO_R, ONE_R, ZERO_R,
1161                     ZERO_R, ZERO_R, ONE_R
1162                 });
1163 
1164             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1165             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
1166             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
1167             request = manualRequestBuilder.build();
1168             mSession.capture(request, listener, mHandler);
1169             result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1170             RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
1171             ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
1172             validateColorCorrectionResult(result, colorCorrectionMode);
1173             mCollector.expectEquals("control mode result/request mismatch",
1174                     CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
1175             mCollector.expectEquals("Color correction gain result/request mismatch",
1176                     UNIT_GAIN, gains);
1177             mCollector.expectEquals("Color correction transform result/request mismatch",
1178                     IDENTITY_TRANSFORM, transform);
1179 
1180         }
1181 
1182         // FAST mode
1183         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
1184         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1185         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1186         request = manualRequestBuilder.build();
1187         mSession.capture(request, listener, mHandler);
1188         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1189         validateColorCorrectionResult(result, colorCorrectionMode);
1190         mCollector.expectEquals("control mode result/request mismatch",
1191                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1192 
1193         // HIGH_QUALITY mode
1194         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
1195         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1196         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1197         request = manualRequestBuilder.build();
1198         mSession.capture(request, listener, mHandler);
1199         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1200         validateColorCorrectionResult(result, colorCorrectionMode);
1201         mCollector.expectEquals("control mode result/request mismatch",
1202                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1203     }
1204 
1205     private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
1206         final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
1207         final int TRANSFORM_SIZE = 9;
1208         Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
1209         Arrays.fill(zeroTransform, ZERO_R);
1210         final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);
1211 
1212         RggbChannelVector resultGain;
1213         if ((resultGain = mCollector.expectKeyValueNotNull(result,
1214                 CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
1215             mCollector.expectKeyValueNotEquals(result,
1216                     CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS);
1217         }
1218 
1219         ColorSpaceTransform resultTransform;
1220         if ((resultTransform = mCollector.expectKeyValueNotNull(result,
1221                 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) {
1222             mCollector.expectKeyValueNotEquals(result,
1223                     CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM);
1224         }
1225 
1226         mCollector.expectEquals("color correction mode result/request mismatch",
1227                 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE));
1228     }
1229 
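    /**
     * For context, a hedged sketch (not invoked by the test; the gain values are illustrative
     * only) of how a client would apply a non-trivial manual white balance with
     * COLOR_CORRECTION_MODE_TRANSFORM_MATRIX: AWB is disabled and explicit per-channel gains
     * plus a color transform are supplied. The method name is hypothetical.
     */
    private void sketchManualWhiteBalance(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
        builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
        // Example gains boosting red and blue relative to green (arbitrary illustrative values).
        builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                new RggbChannelVector(/*red*/2.0f, /*greenEven*/1.0f, /*greenOdd*/1.0f,
                        /*blue*/1.8f));
        // Identity transform for simplicity; a real client would use a sensor-specific matrix.
        builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, new ColorSpaceTransform(
                new Rational[] {
                        ONE_R, ZERO_R, ZERO_R,
                        ZERO_R, ONE_R, ZERO_R,
                        ZERO_R, ZERO_R, ONE_R
                }));
    }
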
1230     /**
1231      * Test flash mode control by AE mode.
1232      * <p>
1233      * Only allow AE mode ON or OFF, because other AE modes could conflict with
1234      * manual flash control. This function expects the camera to already have an active
1235      * repeating request and be sending results to the listener.
1236      * </p>
1237      *
1238      * @param listener The Capture listener that is used to wait for capture result
1239      * @param aeMode The AE mode for flash to test with
1240      */
1241     private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception {
1242         CaptureResult result;
1243         final int NUM_FLASH_REQUESTS_TESTED = 10;
1244         CaptureRequest.Builder requestBuilder = createRequestForPreview();
1245 
1246         if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) {
1247             requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
1248         } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
1249             changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
1250         } else {
1251             throw new IllegalArgumentException("This test only works when AE mode is ON or OFF");
1252         }
1253 
1254         mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1255         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1256 
1257         // For a camera that doesn't have a flash unit, the flash state should always be UNAVAILABLE.
1258         if (mStaticInfo.getFlashInfoChecked() == false) {
1259             for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1260                 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1261                 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE"
1262                         + " for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE,
1263                         result.get(CaptureResult.FLASH_STATE));
1264             }
1265 
1266             return;
1267         }
1268 
1269         // Test flash SINGLE mode control. Wait for flash state to be READY first.
1270         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
1271             waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY,
1272                     NUM_RESULTS_WAIT_TIMEOUT);
1273         } // else the settings were already waited on earlier
1274 
1275         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
1276         CaptureRequest flashSinglerequest = requestBuilder.build();
1277 
1278         int flashModeSingleRequests = captureRequestsSynchronized(
1279                 flashSinglerequest, listener, mHandler);
1280         waitForNumResults(listener, flashModeSingleRequests - 1);
1281         result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT);
1282         // Result mode must be SINGLE, state must be FIRED.
1283         mCollector.expectEquals("Flash mode result must be SINGLE",
1284                 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE));
1285         mCollector.expectEquals("Flash state result must be FIRED",
1286                 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1287 
1288         // Test flash TORCH mode control.
1289         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1290         CaptureRequest torchRequest = requestBuilder.build();
1291 
1292         int flashModeTorchRequests = captureRequestsSynchronized(torchRequest,
1293                 NUM_FLASH_REQUESTS_TESTED, listener, mHandler);
1294         waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED);
1295 
1296         // Verify the results
1297         TorchSeqState state = TorchSeqState.RAMPING_UP;
1298         for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1299             result = listener.getCaptureResultForRequest(torchRequest,
1300                     NUM_RESULTS_WAIT_TIMEOUT);
1301             int flashMode = result.get(CaptureResult.FLASH_MODE);
1302             int flashState = result.get(CaptureResult.FLASH_STATE);
1303             // Result mode must be TORCH
1304             mCollector.expectEquals("Flash mode result " + i + " must be TORCH",
1305                     CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE));
1306             if (state == TorchSeqState.RAMPING_UP &&
1307                     flashState == CaptureResult.FLASH_STATE_FIRED) {
1308                 state = TorchSeqState.FIRED;
1309             } else if (state == TorchSeqState.FIRED &&
1310                     flashState == CaptureResult.FLASH_STATE_PARTIAL) {
1311                 state = TorchSeqState.RAMPING_DOWN;
1312             }
1313 
1314             if (i == 0 && mStaticInfo.isPerFrameControlSupported()) {
1315                 mCollector.expectTrue(
1316                         "Per frame control device must enter FIRED state on first torch request",
1317                         state == TorchSeqState.FIRED);
1318             }
1319 
1320             if (state == TorchSeqState.FIRED) {
1321                 mCollector.expectEquals("Flash state result " + i + " must be FIRED",
1322                         CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1323             } else {
1324                 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL",
1325                         CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE));
1326             }
1327         }
1328         mCollector.expectTrue("Torch state FIRED never seen",
1329                 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN);
1330 
1331         // Test flash OFF mode control
1332         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1333         CaptureRequest flashOffrequest = requestBuilder.build();
1334 
1335         int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler);
1336         waitForNumResults(listener, flashModeOffRequests - 1);
1337         result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT);
1338         mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF,
1339                 result.get(CaptureResult.FLASH_MODE));
1340     }
1341 
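    /**
     * A hedged sketch of what the base-class helper captureRequestsSynchronized() is assumed
     * to do for the flash checks above: submit enough identical captures to cover the device's
     * settings-sync latency so that the last result is guaranteed to reflect the new settings,
     * and return how many captures were submitted. The name and details below are illustrative
     * assumptions, not the real helper.
     */
    private int sketchCaptureRequestsSynchronized(CaptureRequest request,
            SimpleCaptureCallback listener) throws Exception {
        int maxLatency = mStaticInfo.getSyncMaxLatency();
        int count = (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN)
                ? NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1
                : maxLatency + 1;
        for (int i = 0; i < count; i++) {
            mSession.capture(request, listener, mHandler);
        }
        return count;
    }
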
1342     private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified,
1343             int mode, boolean isAeManual, long requestExpTime) throws Exception {
1344         // Skip the first couple of frames as antibanding may not be fully up yet.
1345         final int NUM_FRAMES_SKIPPED = 5;
1346         for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) {
1347             listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1348         }
1349 
1350         for (int i = 0; i < numFramesVerified; i++) {
1351             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1352             Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
1353             assertNotNull("Exposure time shouldn't be null", resultExpTime);
1354             Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
1355             // Scene flicker result should always be available.
1356             assertNotNull("Scene flicker must not be null", flicker);
1357             assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE &&
1358                     flicker <= STATISTICS_SCENE_FLICKER_60HZ);
1359 
1360             if (isAeManual) {
1361                 // The result should be rounded down (not up) and close enough to the request.
1362                 validateExposureTime(requestExpTime, resultExpTime);
1363                 return;
1364             }
1365 
1366             long expectedExpTime = resultExpTime; // Default, no exposure adjustment.
1367             if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) {
1368                 // The result exposure time must be adjusted for a 50Hz illuminant source.
1369                 expectedExpTime =
1370                         getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1371             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) {
1372                 // The result exposure time must be adjusted for a 60Hz illuminant source.
1373                 expectedExpTime =
1374                         getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1375             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO) {
1376                 /**
1377                  * Use STATISTICS_SCENE_FLICKER to tell the illuminant source
1378                  * and do the exposure adjustment.
1379                  */
1380                 expectedExpTime = resultExpTime;
1381                 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) {
1382                     expectedExpTime =
1383                             getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1384                 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) {
1385                     expectedExpTime =
1386                             getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1387                 }
1388             }
1389 
1390             if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) {
1391                 mCollector.addMessage(String.format("Result exposure time %dns diverges too much"
1392                         + " from expected exposure time %dns for mode %d when AE is auto",
1393                         resultExpTime, expectedExpTime, mode));
1394             }
1395         }
1396     }
1397 
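    /**
     * A minimal sketch of the exposure rounding that getAntiFlickeringExposureTime() is
     * expected to perform in the checks above: when the requested exposure is at least one
     * flicker period (10ms for a 50Hz source, ~8.33ms for a 60Hz source), the adjusted
     * exposure is an integer multiple of that period, rounded down. The method name is
     * hypothetical and only illustrates the arithmetic, not the actual helper.
     */
    private static long sketchAntiFlickeringExposureTime(long flickerPeriodNs, long exposureNs) {
        if (exposureNs < flickerPeriodNs) {
            // Shorter than one flicker period: no banding-driven quantization is expected.
            return exposureNs;
        }
        // Round down to a whole number of flicker periods.
        return (exposureNs / flickerPeriodNs) * flickerPeriodNs;
    }
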
1398     private void antiBandingTestByMode(Size size, int mode)
1399             throws Exception {
1400         if (VERBOSE) {
1401             Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId());
1402         }
1403         CaptureRequest.Builder requestBuilder =
1404                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1405 
1406         requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode);
1407 
1408         // Test auto AE mode anti-banding behavior
1409         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1410         startPreview(requestBuilder, size, resultListener);
1411         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1412         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false,
1413                 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK);
1414 
1415         // Test manual AE mode anti-banding behavior
1416         // 65ms, must be supported by full capability devices.
1417         final long TEST_MANUAL_EXP_TIME_NS = 65000000L;
1418         long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS);
1419         changeExposure(requestBuilder, manualExpTime);
1420         resultListener = new SimpleCaptureCallback();
1421         startPreview(requestBuilder, size, resultListener);
1422         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1423         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true,
1424                 manualExpTime);
1425 
1426         stopPreview();
1427     }
1428 
1429     /**
1430      * Test all available AE modes and AE lock.
1431      * <p>
1432      * For manual AE mode, the test iterates through different sensitivities and
1433      * exposure times and validates the result exposure time correctness. For
1434      * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested.
1435      * For the other AUTO modes, AE lock is tested.
1436      * </p>
1437      *
1438      * @param mode
1439      */
1440     private void aeModeAndLockTestByMode(int mode)
1441             throws Exception {
1442         switch (mode) {
1443             case CONTROL_AE_MODE_OFF:
1444                 if (mStaticInfo.isCapabilitySupported(
1445                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
1446                     // Test manual exposure control.
1447                     aeManualControlTest();
1448                 } else {
1449                     Log.w(TAG,
1450                             "aeModeAndLockTestByMode - can't test AE mode OFF without " +
1451                             "manual sensor control");
1452                 }
1453                 break;
1454             case CONTROL_AE_MODE_ON:
1455             case CONTROL_AE_MODE_ON_AUTO_FLASH:
1456             case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
1457             case CONTROL_AE_MODE_ON_ALWAYS_FLASH:
1458                 // Test AE lock for above AUTO modes.
1459                 aeAutoModeTestLock(mode);
1460                 break;
1461             default:
1462                 throw new UnsupportedOperationException("Unhandled AE mode " + mode);
1463         }
1464     }
1465 
1466     /**
1467      * Test AE auto modes.
1468      * <p>
1469      * Use single request rather than repeating request to test AE lock per frame control.
1470      * </p>
1471      */
1472     private void aeAutoModeTestLock(int mode) throws Exception {
1473         CaptureRequest.Builder requestBuilder =
1474                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1475         if (mStaticInfo.isAeLockSupported()) {
1476             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1477         }
1478         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode);
1479         configurePreviewOutput(requestBuilder);
1480 
1481         final int MAX_NUM_CAPTURES_DURING_LOCK = 5;
1482         for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) {
1483             autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i);
1484         }
1485     }
1486 
1487     /**
1488      * Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
1489      * the first capture result after the AE lock. The right AE lock behavior is:
1490      * When it is locked, it locks to the current exposure value, and all subsequent
1491      * requests with lock ON will have the same exposure value locked.
1492      */
1493     private void autoAeMultipleCapturesThenTestLock(
1494             CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)
1495             throws Exception {
1496         if (numCapturesDuringLock < 1) {
1497             throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
1498         }
1499         if (VERBOSE) {
1500             Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
1501                     + aeMode + " with " + numCapturesDuringLock + " captures during lock");
1502         }
1503 
1504         final int NUM_CAPTURES_BEFORE_LOCK = 2;
1505         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1506 
1507         CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
1508         boolean canSetAeLock = mStaticInfo.isAeLockSupported();
1509 
1510         // Reset the AE lock to OFF, since we are reusing this builder many times
1511         if (canSetAeLock) {
1512             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1513         }
1514 
1515         // Just send several captures with auto AE, lock off.
1516         CaptureRequest request = requestBuilder.build();
1517         for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
1518             mSession.capture(request, listener, mHandler);
1519         }
1520         waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
1521 
1522         if (!canSetAeLock) {
1523             // Without AE lock, the remaining test items won't work.
1524             return;
1525         }
1526 
1527         // Then fire several captures to lock the AE.
1528         requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1529 
1530         int requestCount = captureRequestsSynchronized(
1531                 requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
1532 
1533         int[] sensitivities = new int[numCapturesDuringLock];
1534         long[] expTimes = new long[numCapturesDuringLock];
1535         Arrays.fill(sensitivities, -1);
1536         Arrays.fill(expTimes, -1L);
1537 
1538         // Get the AE lock on result and validate the exposure values.
1539         waitForNumResults(listener, requestCount - numCapturesDuringLock);
1540         for (int i = 0; i < resultsDuringLock.length; i++) {
1541             resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1542         }
1543 
1544         for (int i = 0; i < numCapturesDuringLock; i++) {
1545             mCollector.expectKeyValueEquals(
1546                     resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
1547         }
1548 
1549         // Can't read the sensor exposure settings without the READ_SENSOR_SETTINGS capability.
1550         if (mStaticInfo.isCapabilitySupported(
1551                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
1552             int sensitivityLocked =
1553                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
1554             long expTimeLocked =
1555                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
1556             for (int i = 1; i < resultsDuringLock.length; i++) {
1557                 mCollector.expectKeyValueEquals(
1558                         resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
1559                 mCollector.expectKeyValueEquals(
1560                         resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
1561             }
1562         }
1563     }
1564 
1565     /**
1566      * Iterate through exposure times and sensitivities for manual AE control.
1567      * <p>
1568      * Use single request rather than repeating request to test manual exposure
1569      * value change per frame control.
1570      * </p>
1571      */
1572     private void aeManualControlTest()
1573             throws Exception {
1574         CaptureRequest.Builder requestBuilder =
1575                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1576 
1577         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
1578         configurePreviewOutput(requestBuilder);
1579         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1580 
1581         long[] expTimes = getExposureTimeTestValues();
1582         int[] sensitivities = getSensitivityTestValues();
1583         // Submit single request at a time, then verify the result.
1584         for (int i = 0; i < expTimes.length; i++) {
1585             for (int j = 0; j < sensitivities.length; j++) {
1586                 if (VERBOSE) {
1587                     Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity "
1588                             + sensitivities[j] + ", exposure time " + expTimes[i] + "ns");
1589                 }
1590 
1591                 changeExposure(requestBuilder, expTimes[i], sensitivities[j]);
1592                 mSession.capture(requestBuilder.build(), listener, mHandler);
1593 
1594                 // make sure timeout is long enough for long exposure time
1595                 long timeout = WAIT_FOR_RESULT_TIMEOUT_MS + expTimes[i];
1596                 CaptureResult result = listener.getCaptureResult(timeout);
1597                 long resultExpTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
1598                 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
1599                 validateExposureTime(expTimes[i], resultExpTime);
1600                 validateSensitivity(sensitivities[j], resultSensitivity);
1601                 validateFrameDurationForCapture(result);
1602             }
1603         }
1604         // TODO: Add another case to test where we can submit all requests, then wait for
1605         // results, which will hide the pipeline latency. This is not only faster, but also
1606         // tests high-speed per-frame control and synchronization.
1607     }
1608 
1609 
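    /**
     * A hedged sketch of the pipelined variant suggested by the TODO above: submit every
     * (exposure time, sensitivity) combination first, then drain and validate the results, so
     * per-frame control is exercised without paying the full pipeline latency per capture.
     * This is an illustration only and is not wired into the test.
     */
    private void sketchPipelinedManualAeCaptures(CaptureRequest.Builder requestBuilder,
            long[] expTimes, int[] sensitivities) throws Exception {
        SimpleCaptureCallback listener = new SimpleCaptureCallback();
        List<CaptureRequest> requests = new ArrayList<CaptureRequest>();
        for (long expTime : expTimes) {
            for (int sensitivity : sensitivities) {
                changeExposure(requestBuilder, expTime, sensitivity);
                CaptureRequest request = requestBuilder.build();
                requests.add(request);
                mSession.capture(request, listener, mHandler);
            }
        }
        // Drain the results in submission order and validate each against its own request.
        for (CaptureRequest request : requests) {
            CaptureResult result =
                    listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
            long resultExpTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
            validateExposureTime(request.get(CaptureRequest.SENSOR_EXPOSURE_TIME), resultExpTime);
        }
    }
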
1610     /**
1611      * Verify black level lock control.
1612      */
1613     private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified,
1614             int maxLockOffCnt) throws Exception {
1615         int noLockCnt = 0;
1616         for (int i = 0; i < numFramesVerified; i++) {
1617             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1618             Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK);
1619             assertNotNull("Black level lock result shouldn't be null", blackLevelLock);
1620 
1621             // Count the lock == false results, which should occur at most maxLockOffCnt times.
1622             if (blackLevelLock == false) {
1623                 noLockCnt++;
1624             }
1625 
1626             if (VERBOSE) {
1627                 Log.v(TAG, "Black level lock result: " + blackLevelLock);
1628             }
1629         }
1630         assertTrue("Black level lock OFF occurred " + noLockCnt + " times, expected at most "
1631                 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt);
1632     }
1633 
1634     /**
1635      * Verify shading map for different shading modes.
1636      */
1637     private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
1638             int shadingMode) throws Exception {
1639 
1640         for (int i = 0; i < numFramesVerified; i++) {
1641             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1642             mCollector.expectEquals("Shading mode result doesn't match request",
1643                     shadingMode, result.get(CaptureResult.SHADING_MODE));
1644             LensShadingMap mapObj = result.get(
1645                     CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
1646             assertNotNull("Map object must not be null", mapObj);
1647             int numElementsInMap = mapObj.getGainFactorCount();
1648             float[] map = new float[numElementsInMap];
1649             mapObj.copyGainFactors(map, /*offset*/0);
1650             assertNotNull("Map must not be null", map);
1651             assertFalse(String.format(
1652                     "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
1653                     numElementsInMap >= MAX_SHADING_MAP_SIZE);
1654             assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
1655                     MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);
1656 
1657             if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
1658                     shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
1659                 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
1660                 // elements >= 1.0f
1661 
1662                 int badValueCnt = 0;
1663                 // Detect the bad values of the map data.
1664                 for (int j = 0; j < numElementsInMap; j++) {
1665                     if (Float.isNaN(map[j]) || map[j] < 1.0f) {
1666                         badValueCnt++;
1667                     }
1668                 }
1669                 assertEquals("Number of bad values in the map is " + badValueCnt + " out of "
1670                         + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
1671             } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
1672                 float[] unityMap = new float[numElementsInMap];
1673                 Arrays.fill(unityMap, 1.0f);
1674                 // shading mode is OFF, expect to receive a unity map.
1675                 assertTrue("Result map " + Arrays.toString(map) + " must be a unity map",
1676                         Arrays.equals(unityMap, map));
1677             }
1678         }
1679     }
1680 
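    /**
     * A hedged illustration of walking a LensShadingMap with the per-cell accessors (the check
     * above uses copyGainFactors() instead), e.g. to find the strongest correction applied for
     * one color channel. The helper name is hypothetical and is not used by the test.
     */
    private static float sketchMaxShadingGain(LensShadingMap map, int colorChannel) {
        float maxGain = 1.0f;
        for (int row = 0; row < map.getRowCount(); row++) {
            for (int col = 0; col < map.getColumnCount(); col++) {
                // Gain factors are reported per channel on a rows x columns grid.
                maxGain = Math.max(maxGain, map.getGainFactor(colorChannel, col, row));
            }
        }
        return maxGain;
    }
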
1681     /**
1682      * Test face detection for a camera.
1683      */
1684     private void faceDetectionTestByCamera() throws Exception {
1685         int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
1686 
1687         SimpleCaptureCallback listener;
1688         CaptureRequest.Builder requestBuilder =
1689                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1690 
1691         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1692         for (int mode : faceDetectModes) {
1693             requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
1694             if (VERBOSE) {
1695                 Log.v(TAG, "Start testing face detection mode " + mode);
1696             }
1697 
1698             // Create a new listener for each run to avoid the results from one run spilling
1699             // into another run.
1700             listener = new SimpleCaptureCallback();
1701             startPreview(requestBuilder, maxPreviewSz, listener);
1702             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1703             verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
1704         }
1705 
1706         stopPreview();
1707     }
1708 
1709     /**
1710      * Verify face detection results for different face detection modes.
1711      *
1712      * @param listener The listener to get capture result
1713      * @param numFramesVerified Number of results to be verified
1714      * @param faceDetectionMode Face detection mode to be verified against
1715      */
1716     private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
1717             int faceDetectionMode) {
1718         for (int i = 0; i < numFramesVerified; i++) {
1719             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1720             mCollector.expectEquals("Result face detection mode should match the request",
1721                     faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
1722 
1723             Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
1724             List<Integer> faceIds = new ArrayList<Integer>(faces.length);
1725             List<Integer> faceScores = new ArrayList<Integer>(faces.length);
1726             if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
1727                 mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
1728                         0, faces.length);
1729             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
1730                 for (Face face : faces) {
1731                     mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
1732                     faceScores.add(face.getScore());
1733                     mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
1734                             face.getId() == Face.ID_UNSUPPORTED);
1735                 }
1736             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
1737                 if (VERBOSE) {
1738                     Log.v(TAG, "Number of faces detected: " + faces.length);
1739                 }
1740 
1741                 for (Face face : faces) {
1742                     Rect faceBound;
1743                     boolean faceRectAvailable =  mCollector.expectTrue("Face rectangle "
1744                             + "shouldn't be null", face.getBounds() != null);
1745                     if (!faceRectAvailable) {
1746                         continue;
1747                     }
1748                     faceBound = face.getBounds();
1749 
1750                     faceScores.add(face.getScore());
1751                     faceIds.add(face.getId());
1752 
1753                     mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
1754                             face.getId() != Face.ID_UNSUPPORTED);
1755                     boolean leftEyeAvailable =
1756                             mCollector.expectTrue("Left eye position shouldn't be null",
1757                                     face.getLeftEyePosition() != null);
1758                     boolean rightEyeAvailable =
1759                             mCollector.expectTrue("Right eye position shouldn't be null",
1760                                     face.getRightEyePosition() != null);
1761                     boolean mouthAvailable =
1762                             mCollector.expectTrue("Mouth position shouldn't be null",
1763                             face.getMouthPosition() != null);
1764                     // Eyes/mouth position should be inside of the face rect.
1765                     if (leftEyeAvailable) {
1766                         Point leftEye = face.getLeftEyePosition();
1767                         mCollector.expectTrue("Left eye " + leftEye + " should be"
1768                                 + " inside of face rect " + faceBound,
1769                                 faceBound.contains(leftEye.x, leftEye.y));
1770                     }
1771                     if (rightEyeAvailable) {
1772                         Point rightEye = face.getRightEyePosition();
1773                         mCollector.expectTrue("Right eye " + rightEye + " should be"
1774                                 + " inside of face rect " + faceBound,
1775                                 faceBound.contains(rightEye.x, rightEye.y));
1776                     }
1777                     if (mouthAvailable) {
1778                         Point mouth = face.getMouthPosition();
1779                         mCollector.expectTrue("Mouth " + mouth + " should be inside of"
1780                                 + " face rect " + faceBound,
1781                                 faceBound.contains(mouth.x, mouth.y));
1782                     }
1783                 }
1784             }
1785             mCollector.expectValuesInRange("Face scores are invalid", faceScores,
1786                     Face.SCORE_MIN, Face.SCORE_MAX);
1787             mCollector.expectValuesUnique("Face ids are invalid", faceIds);
1788         }
1789     }
1790 
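    /**
     * A hedged aside on coordinates (not part of the verification above): the face rectangles
     * in STATISTICS_FACES are reported in the sensor active-array coordinate system, so a
     * client drawing face boxes over a preview typically normalizes them first. The helper
     * below is an illustrative assumption, returning {left, top, right, bottom} in [0, 1].
     */
    private static float[] sketchNormalizedFaceBounds(Rect faceBounds, Rect activeArray) {
        return new float[] {
                (float) (faceBounds.left - activeArray.left) / activeArray.width(),
                (float) (faceBounds.top - activeArray.top) / activeArray.height(),
                (float) (faceBounds.right - activeArray.left) / activeArray.width(),
                (float) (faceBounds.bottom - activeArray.top) / activeArray.height(),
        };
    }
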
1791     /**
1792      * Test tone map mode and result by camera
1793      */
1794     private void toneMapTestByCamera() throws Exception {
1795         if (!mStaticInfo.isManualToneMapSupported()) {
1796             return;
1797         }
1798 
1799         CaptureRequest.Builder requestBuilder =
1800                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1801         int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
1802         for (int mode : toneMapModes) {
1803             if (VERBOSE) {
1804                 Log.v(TAG, "Testing tonemap mode " + mode);
1805             }
1806 
1807             requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
1808             switch (mode) {
1809                 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
1810                     TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
1811                             TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
1812                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
1813                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1814 
1815                     toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
1816                             TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
1817                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
1818                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1819                     break;
1820                 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
1821                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
1822                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1823                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
1824                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1825                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
1826                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1827                     break;
1828                 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
1829                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
1830                             CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
1831                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1832                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
1833                             CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
1834                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1835                     break;
1836                 default:
1837                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
1838                     break;
1839             }
1840         }
1841 
1842 
1843     }
1844 
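    /**
     * A hedged sketch (the name and values are illustrative, not used by the test) of how a
     * contrast curve for TONEMAP_MODE_CONTRAST_CURVE can be sampled from a gamma function as
     * interleaved (Pin, Pout) pairs; TONEMAP_CURVE_LINEAR and TONEMAP_CURVE_SRGB used above
     * are precomputed arrays of the same form. Assumes numPoints >= 2.
     */
    private static TonemapCurve sketchGammaToneCurve(float gamma, int numPoints) {
        float[] curve = new float[numPoints * TonemapCurve.POINT_SIZE];
        for (int i = 0; i < numPoints; i++) {
            float pIn = (float) i / (numPoints - 1);
            curve[i * TonemapCurve.POINT_SIZE] = pIn;                                    // Pin
            curve[i * TonemapCurve.POINT_SIZE + 1] = (float) Math.pow(pIn, 1.0 / gamma); // Pout
        }
        // Apply the same sampled curve to all three color channels.
        return new TonemapCurve(curve, curve, curve);
    }
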
1845     /**
1846      * Test tonemap mode with specified request settings.
1847      *
1848      * @param numFramesVerified Number of results to be verified
1849      * @param requestBuilder the request builder of settings to be tested
1850      */
1851     private void testToneMapMode (int numFramesVerified,
1852             CaptureRequest.Builder requestBuilder)  throws Exception  {
1853         final int MIN_TONEMAP_CURVE_POINTS = 2;
1854         final Float ZERO = new Float(0);
1855         final Float ONE = new Float(1.0f);
1856 
1857         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1858         int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
1859         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1860         startPreview(requestBuilder, maxPreviewSz, listener);
1861         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1862 
1863         int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
1864         for (int i = 0; i < numFramesVerified; i++) {
1865             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1866             mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
1867                     result.get(CaptureResult.TONEMAP_MODE));
1868             TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
1869             int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
1870             float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
1871             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
1872             float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
1873             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
1874             float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
1875             tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
1876             tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
1877             tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
1878             if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
1879                 /**
1880                  * TODO: need to figure out a good way to measure the difference
1881                  * between request and result, as they may have different array
1882                  * size.
1883                  */
1884             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
1885                 mCollector.expectEquals("Capture result gamma value should match request",
1886                         requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
1887                         result.get(CaptureResult.TONEMAP_GAMMA));
1888             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
1889                 mCollector.expectEquals("Capture result preset curve should match request",
1890                         requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
1891                         result.get(CaptureResult.TONEMAP_PRESET_CURVE));
1892             }
1893 
1894             // Tonemap curve result availability and basic sanity check for all modes.
1895             mCollector.expectValuesInRange("Tonemap curve red values are out of range",
1896                     CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE);
1897             mCollector.expectInRange("Tonemap curve red length is out of range",
1898                     mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
1899             mCollector.expectValuesInRange("Tonemap curve green values are out of range",
1900                     CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE);
1901             mCollector.expectInRange("Tonemap curve green length is out of range",
1902                     mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
1903             mCollector.expectValuesInRange("Tonemap curve blue values are out of range",
1904                     CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE);
1905             mCollector.expectInRange("Tonemap curve blue length is out of range",
1906                     mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
1907         }
1908         stopPreview();
1909     }
1910 
1911     /**
1912      * Test awb mode control.
1913      * <p>
1914      * Test each supported AWB mode, verify the AWB mode in capture result
1915      * matches request. When AWB is locked, the color correction gains and
1916      * transform should remain unchanged.
1917      * </p>
1918      */
1919     private void awbModeAndLockTestByCamera() throws Exception {
1920         int[] awbModes = mStaticInfo.getAwbAvailableModesChecked();
1921         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
1922         boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
1923         CaptureRequest.Builder requestBuilder =
1924                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1925         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
1926 
1927         for (int mode : awbModes) {
1928             SimpleCaptureCallback listener;
1929             requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode);
1930             listener = new SimpleCaptureCallback();
1931             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1932             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1933 
1934             // Verify AWB mode in capture result.
1935             verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener,
1936                     NUM_FRAMES_VERIFIED);
1937 
1938             if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) {
1939                 // Verify color correction transform and gains stay unchanged after a lock.
1940                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
1941                 listener = new SimpleCaptureCallback();
1942                 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1943                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1944 
1945                 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) {
1946                     waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE,
1947                             CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT);
1948                 }
1949 
1950             }
1951             // Don't verify auto mode result if AWB lock is not supported
1952             if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) {
1953                 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED);
1954             }
1955         }
1956     }
1957 
1958     private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener,
1959             int numFramesVerified) {
1960         // Skip check if cc gains/transform/mode are not available
1961         if (!mStaticInfo.areKeysAvailable(
1962                 CaptureResult.COLOR_CORRECTION_GAINS,
1963                 CaptureResult.COLOR_CORRECTION_TRANSFORM,
1964                 CaptureResult.COLOR_CORRECTION_MODE)) {
1965             return;
1966         }
1967 
1968         CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1969         RggbChannelVector lockedGains =
1970                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
1971         ColorSpaceTransform lockedTransform =
1972                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
1973 
1974         for (int i = 0; i < numFramesVerified; i++) {
1975             result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1976             // Color correction mode check is skipped here, as it is checked in colorCorrectionTest.
1977             validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE));
1978 
1979             RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
1980             ColorSpaceTransform transform =
1981                     getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
1982             mCollector.expectEquals("Color correction gains should remain unchanged after awb lock",
1983                     lockedGains, gains);
1984             mCollector.expectEquals("Color correction transform should remain unchanged after"
1985                     + " awb lock", lockedTransform, transform);
1986         }
1987     }
1988 
1989     /**
1990      * Test AF mode control.
1991      * <p>
1992      * Test all supported AF modes, verify the AF mode in capture result matches
1993      * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode,
1994      * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED
1995      * state within a certain number of frames.
1996      * </p>
1997      */
1998     private void afModeTestByCamera() throws Exception {
1999         int[] afModes = mStaticInfo.getAfAvailableModesChecked();
2000         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2001         CaptureRequest.Builder requestBuilder =
2002                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2003         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2004 
2005         for (int mode : afModes) {
2006             SimpleCaptureCallback listener;
2007             requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode);
2008             listener = new SimpleCaptureCallback();
2009             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2010             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2011 
2012             // Verify AF mode in capture result.
2013             verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener,
2014                     NUM_FRAMES_VERIFIED);
2015 
2016             // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes.
2017             // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily
2018             // result in a passive AF call if the camera has already been focused, and the scene has
2019             // not changed enough to trigger an AF pass.  Skip this constraint for LEGACY.
2020             if (mStaticInfo.isHardwareLevelAtLeastLimited() &&
2021                     (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE ||
2022                     mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
2023                 List<Integer> afStateList = new ArrayList<Integer>();
2024                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED);
2025                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED);
2026                 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList,
2027                         NUM_RESULTS_WAIT_TIMEOUT);
2028             }
2029         }
2030     }
2031 
2032     /**
2033      * Test video and optical stabilizations if they are supported by a given camera.
2034      */
2035     private void stabilizationTestByCamera() throws Exception {
2036         // video stabilization test.
2037         List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys();
2038 
2039         Integer[] videoStabModes = (keys.contains(CameraCharacteristics.
2040                 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ?
2041                 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) :
2042                     new Integer[0];
2043         int[] opticalStabModes = (keys.contains(
2044                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ?
2045                 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0];
2046 
2047         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2048         CaptureRequest.Builder requestBuilder =
2049                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2050         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2051         startPreview(requestBuilder, maxPreviewSize, listener);
2052 
2053         for (Integer mode : videoStabModes) {
2054             listener = new SimpleCaptureCallback();
2055             requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode);
2056             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2057             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2058             // Video stabilization could return any of the supported modes.
2059             verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE,
2060                     videoStabModes, listener, NUM_FRAMES_VERIFIED);
2061         }
2062 
2063         for (int mode : opticalStabModes) {
2064             listener = new SimpleCaptureCallback();
2065             requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
2066             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2067             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2068             verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode,
2069                     listener, NUM_FRAMES_VERIFIED);
2070         }
2071 
2072         stopPreview();
2073     }
2074 
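    /**
     * A hedged sketch of the crop-region math that the zoom test below relies on through
     * getCropRegionForZoom(): the crop rectangle spans 1/zoomFactor of the active array in
     * each dimension and is placed around the requested normalized center, clamped so that it
     * stays inside the active array. The helper name and exact clamping are illustrative
     * assumptions, not the CameraTestUtils implementation.
     */
    private static Rect sketchCropRegionForZoom(float zoomFactor, PointF center,
            Rect activeArray) {
        int cropWidth = (int) (activeArray.width() / zoomFactor);
        int cropHeight = (int) (activeArray.height() / zoomFactor);
        // Desired top-left from the normalized center, then clamp to keep the crop in bounds.
        int left = (int) (center.x * activeArray.width() - cropWidth / 2.0f);
        int top = (int) (center.y * activeArray.height() - cropHeight / 2.0f);
        left = Math.max(0, Math.min(left, activeArray.width() - cropWidth));
        top = Math.max(0, Math.min(top, activeArray.height() - cropHeight));
        return new Rect(left, top, left + cropWidth, top + cropHeight);
    }
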
2075     private void digitalZoomTestByCamera(Size previewSize) throws Exception {
2076         final int ZOOM_STEPS = 15;
2077         final PointF[] TEST_ZOOM_CENTERS;
2078 
2079         final int croppingType = mStaticInfo.getScalerCroppingTypeChecked();
2080         if (croppingType ==
2081                 CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) {
2082             TEST_ZOOM_CENTERS = new PointF[] {
2083                 new PointF(0.5f, 0.5f),   // Center point
2084                 new PointF(0.25f, 0.25f), // top left corner zoom, minimal zoom: 2x
2085                 new PointF(0.75f, 0.25f), // top right corner zoom, minimal zoom: 2x
2086                 new PointF(0.25f, 0.75f), // bottom left corner zoom, minimal zoom: 2x
2087                 new PointF(0.75f, 0.75f), // bottom right corner zoom, minimal zoom: 2x
2088             };
2089 
2090             if (VERBOSE) {
2091                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM");
2092             }
2093         } else {
2094             // CENTER_ONLY
2095             TEST_ZOOM_CENTERS = new PointF[] {
2096                     new PointF(0.5f, 0.5f),   // Center point
2097             };
2098 
2099             if (VERBOSE) {
2100                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY");
2101             }
2102         }
2103 
2104         final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked();
2105         final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
2106         Rect[] cropRegions = new Rect[ZOOM_STEPS];
2107         MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][];
2108         CaptureRequest.Builder requestBuilder =
2109                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2110         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2111 
2112         updatePreviewSurface(previewSize);
2113         configurePreviewOutput(requestBuilder);
2114 
2115         CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS];
2116 
2117         // Set algorithm regions to full active region
2118         // TODO: test more different 3A regions
2119         final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] {
2120                 new MeteringRectangle (
2121                         /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(),
2122                         /*meteringWeight*/1)
2123         };
2124 
2125         for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2126             update3aRegion(requestBuilder, algo,  defaultMeteringRect);
2127         }
2128 
2129         final int CAPTURE_SUBMIT_REPEAT;
2130         {
2131             int maxLatency = mStaticInfo.getSyncMaxLatency();
2132             if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
2133                 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1;
2134             } else {
2135                 CAPTURE_SUBMIT_REPEAT = maxLatency + 1;
2136             }
2137         }
2138 
2139         if (VERBOSE) {
2140             Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT);
2141         }
2142 
2143         for (PointF center : TEST_ZOOM_CENTERS) {
2144             Rect previousCrop = null;
2145 
2146             for (int i = 0; i < ZOOM_STEPS; i++) {
2147                 /*
2148                  * Submit capture request
2149                  */
2150                 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS);
2151                 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, maxZoom, activeArraySize);
2152                 if (VERBOSE) {
2153                     Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " +
2154                             center + " The cropRegion is " + cropRegions[i] +
2155                             " Preview size is " + previewSize);
2156                 }
2157                 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]);
2158                 requests[i] = requestBuilder.build();
2159                 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) {
2160                     if (VERBOSE) {
2161                         Log.v(TAG, "submit crop region " + cropRegions[i]);
2162                     }
2163                     mSession.capture(requests[i], listener, mHandler);
2164                 }
2165 
2166                 /*
2167                  * Validate capture result
2168                  */
2169                 waitForNumResults(listener, CAPTURE_SUBMIT_REPEAT - 1); // Drop first few frames
2170                 CaptureResult result = listener.getCaptureResultForRequest(
2171                         requests[i], NUM_RESULTS_WAIT_TIMEOUT);
2172                 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION);
2173 
2174                 /*
2175                  * Validate resulting crop regions
2176                  */
2177                 if (previousCrop != null) {
2178                     Rect currentCrop = cropRegion;
2179                     mCollector.expectTrue(String.format(
2180                             "Crop region should shrink or stay the same " +
2181                                     "(previous = %s, current = %s)",
2182                                     previousCrop, currentCrop),
2183                             previousCrop.equals(currentCrop) ||
2184                                 (previousCrop.width() > currentCrop.width() &&
2185                                  previousCrop.height() > currentCrop.height()));
2186                 }
2187 
2188                 if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
2189                     mCollector.expectRectsAreSimilar(
2190                             "Request and result crop region should be similar",
2191                             cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA);
2192                 }
2193 
2194                 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) {
2195                     mCollector.expectRectCentered(
2196                             "Result crop region should be centered inside the active array",
2197                             new Size(activeArraySize.width(), activeArraySize.height()),
2198                             cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED);
2199                 }
2200 
2201                 /*
2202                  * Validate resulting metering regions
2203                  */
2204 
2205                 // Use the actual reported crop region to calculate the resulting metering region
2206                 expectRegions[i] = getExpectedOutputRegion(
2207                         /*requestRegion*/defaultMeteringRect,
2208                         /*cropRect*/     cropRegion);
2209 
2210                 // Verify Output 3A region is intersection of input 3A region and crop region
2211                 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2212                     validate3aRegion(result, algo, expectRegions[i]);
2213                 }
2214 
2215                 previousCrop = cropRegion;
2216             }
2217 
2218             if (maxZoom > 1.0f) {
2219                 mCollector.expectTrue(
2220                         String.format("Most zoomed-in crop region should be smaller " +
2221                                         "than active array w/h " +
2222                                         "(last crop = %s, active array = %s)",
2223                                         previousCrop, activeArraySize),
2224                             (previousCrop.width() < activeArraySize.width() &&
2225                              previousCrop.height() < activeArraySize.height()));
2226             }
2227         }
2228     }
2229 
2230     private void digitalZoomPreviewCombinationTestByCamera() throws Exception {
2231         final double ASPECT_RATIO_THRESHOLD = 0.001;
2232         List<Double> aspectRatiosTested = new ArrayList<Double>();
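        // mOrderedPreviewSizes is sorted in descending order, so index 0 is the largest
        // (max) preview size.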
2233         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2234         aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight());
2235 
2236         for (Size size : mOrderedPreviewSizes) {
2237             // The max preview size was already tested in the testDigitalZoom test; skip it.
2238             if (size.equals(maxPreviewSize)) {
2239                 continue;
2240             }
2241 
2242             // Only test the largest size for each aspect ratio.
2243             double aspectRatio = (double)(size.getWidth()) / size.getHeight();
2244             if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) {
2245                 continue;
2246             }
2247 
2248             if (VERBOSE) {
2249                 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom");
2250             }
2251 
2252             aspectRatiosTested.add(aspectRatio);
2253             digitalZoomTestByCamera(size);
2254         }
2255     }
2256 
2257     private static boolean isAspectRatioContained(List<Double> aspectRatioList,
2258             double aspectRatio, double delta) {
2259         for (Double ratio : aspectRatioList) {
2260             if (Math.abs(ratio - aspectRatio) < delta) {
2261                 return true;
2262             }
2263         }
2264 
2265         return false;
2266     }
2267 
2268     private void sceneModeTestByCamera() throws Exception {
2269         int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked();
2270         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2271         CaptureRequest.Builder requestBuilder =
2272                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2273         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2274         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
2275         startPreview(requestBuilder, maxPreviewSize, listener);
2276 
2277         for(int mode : sceneModes) {
2278             requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode);
2279             listener = new SimpleCaptureCallback();
2280             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2281             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2282 
2283             verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE,
2284                     mode, listener, NUM_FRAMES_VERIFIED);
2285             // This also serves the purpose of showing the preview for NUM_FRAMES_VERIFIED frames
2286             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2287                     CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED);
2288         }
2289     }
2290 
2291     private void effectModeTestByCamera() throws Exception {
2292         int[] effectModes = mStaticInfo.getAvailableEffectModesChecked();
2293         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2294         CaptureRequest.Builder requestBuilder =
2295                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2296         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
2297         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2298         startPreview(requestBuilder, maxPreviewSize, listener);
2299 
2300         for(int mode : effectModes) {
2301             requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode);
2302             listener = new SimpleCaptureCallback();
2303             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2304             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2305 
2306             verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE,
2307                     mode, listener, NUM_FRAMES_VERIFIED);
2308             // This also serves the purpose of showing the preview for NUM_FRAMES_VERIFIED frames
2309             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2310                     CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED);
2311         }
2312     }
2313 
2314     //----------------------------------------------------------------
2315     //---------Below are common functions for all tests.--------------
2316     //----------------------------------------------------------------
2317 
2318     /**
2319      * Enable manual exposure control, set the exposure time and sensitivity, and
2320      * clamp both values into their supported ranges.
2321      */
2322     private void changeExposure(CaptureRequest.Builder requestBuilder,
2323             long expTime, int sensitivity) {
2324         // Check if the max analog sensitivity is available and no larger than max sensitivity.
2325         // The max analog sensitivity is not actually used here. This is only an extra sanity check.
2326         mStaticInfo.getMaxAnalogSensitivityChecked();
2327 
2328         expTime = mStaticInfo.getExposureClampToRange(expTime);
2329         sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity);
2330 
2331         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
2332         requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime);
2333         requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
2334     }
2335     /**
2336      * Enable manual exposure control, set the exposure time, and
2337      * clamp the value into the supported range.
2338      *
2339      * <p>The sensitivity is set to default value.</p>
2340      */
2341     private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) {
2342         changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY);
2343     }
2344 
2345     /**
2346      * Get an exposure time array that contains multiple exposure time steps spanning
2347      * the supported exposure time range, in decreasing order.
2348      */
2349     private long[] getExposureTimeTestValues() {
2350         long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1];
2351         long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS);
2352         long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS);
2353 
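        // Divide the supported exposure time range into equal steps and walk from the
        // maximum exposure time down toward the minimum.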
2354         long range = maxExpTime - minExpTime;
2355         double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS;
2356         for (int i = 0; i < testValues.length; i++) {
2357             testValues[i] = maxExpTime - (long)(stepSize * i);
2358             testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]);
2359         }
2360 
2361         return testValues;
2362     }
2363 
2364     /**
2365      * Generate test focus distances in the range [0, minFocusDistance], in increasing order.
2366      *
2367      * @param repeatMin number of times minValue will be repeated.
2368      * @param repeatMax number of times maxValue will be repeated.
2369      */
2370     private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) {
2371         int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax;
2372         float[] testValues = new float[totalCount];
2373         float minValue = 0;
2374         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
2375 
2376         float range = maxValue - minValue;
2377         float stepSize = range / NUM_TEST_FOCUS_DISTANCES;
2378 
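        // Fill the array with repeatMin copies of the minimum, then evenly spaced values
        // from minimum to maximum, then repeatMax copies of the maximum.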
2379         for (int i = 0; i < repeatMin; i++) {
2380             testValues[i] = minValue;
2381         }
2382         for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) {
2383             testValues[repeatMin+i] = minValue + stepSize * i;
2384         }
2385         for (int i = 0; i < repeatMax; i++) {
2386             testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] =
2387                     maxValue;
2388         }
2389 
2390         return testValues;
2391     }
2392 
2393     /**
2394      * Get the sensitivity array that contains multiple sensitivity steps in the
2395      * sensitivity range.
2396      * <p>
2397      * The number of sensitivity test values is determined by
2398      * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and the sensitivity range, and is
2399      * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
2400      * </p>
2401      */
2402     private int[] getSensitivityTestValues() {
2403         int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
2404                 DEFAULT_SENSITIVITY);
2405         int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
2406                 DEFAULT_SENSITIVITY);
2407 
2408         int range = maxSensitivity - minSensitivity;
2409         int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
2410         int numSteps = range / stepSize;
2411         // Bound the number of test steps to avoid an excessively long test.
2412         if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
2413             numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
2414             stepSize = range / numSteps;
2415         }
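        // Walk from the maximum sensitivity downward, clamping each value into the
        // supported range.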
2416         int[] testValues = new int[numSteps + 1];
2417         for (int i = 0; i < testValues.length; i++) {
2418             testValues[i] = maxSensitivity - stepSize * i;
2419             testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
2420         }
2421 
2422         return testValues;
2423     }
2424 
2425     /**
2426      * Validate the AE manual control exposure time.
2427      *
2428      * <p>The result exposure time should be close to the request, and only rounded down if they are not equal.</p>
2429      *
2430      * @param request Request exposure time
2431      * @param result Result exposure time
2432      */
2433     private void validateExposureTime(long request, long result) {
2434         long expTimeDelta = request - result;
2435         long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
2436                 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
2437         // The result must be rounded down (never up) and must be close enough to the request.
2438         mCollector.expectTrue("Exposure time is invalid for AE manual control test, request: "
2439                 + request + " result: " + result,
2440                 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
2441     }
2442 
2443     /**
2444      * Validate AE manual control sensitivity.
2445      *
2446      * @param request Request sensitivity
2447      * @param result Result sensitivity
2448      */
2449     private void validateSensitivity(int request, int result) {
2450         float sensitivityDelta = request - result;
2451         float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
2452         // The result must be rounded down (never up) and must be close enough to the request.
2453         mCollector.expectTrue("Sensitivity is invalid for AE manual control test, request: "
2454                 + request + " result: " + result,
2455                 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
2456     }
2457 
2458     /**
2459      * Validate frame duration for a given capture.
2460      *
2461      * <p>The frame duration should be no shorter than the exposure time.</p>
2462      *
2463      * @param result The capture result for a given capture
2464      */
2465     private void validateFrameDurationForCapture(CaptureResult result) {
2466         long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2467         long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
2468         if (VERBOSE) {
2469             Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
2470         }
2471 
2472         mCollector.expectTrue(String.format("Frame duration (%d) should be no shorter than the"
2473                 + " exposure time (%d) for a given capture", frameDuration, expTime),
2474                 frameDuration >= expTime);
2475 
2476         validatePipelineDepth(result);
2477     }
2478 
2479     /**
2480      * Basic verification that the capture result value for a given key matches the requested value.
2481      *
2482      * @param key The capture result key to be verified against
2483      * @param requestMode The request mode for this result
2484      * @param listener The capture listener to get capture results
2485      * @param numFramesVerified The number of capture results to be verified
2486      */
2487     private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode,
2488             SimpleCaptureCallback listener, int numFramesVerified) {
2489         for (int i = 0; i < numFramesVerified; i++) {
2490             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2491             validatePipelineDepth(result);
2492             T resultMode = getValueNotNull(result, key);
2493             if (VERBOSE) {
2494                 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: "
2495                         + resultMode.toString());
2496             }
2497             mCollector.expectEquals("Key " + key.getName() + " result should match request",
2498                     requestMode, resultMode);
2499         }
2500     }
2501 
2502     /**
2503      * Basic verification that the value of a capture result key should be one of the expected
2504      * values.
2505      *
2506      * @param key The capture result key to be verified against
2507      * @param expectedModes The list of any possible expected modes for this result
2508      * @param listener The capture listener to get capture results
2509      * @param numFramesVerified The number of capture results to be verified
2510      */
2511     private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes,
2512             SimpleCaptureCallback listener, int numFramesVerified) {
2513         for (int i = 0; i < numFramesVerified; i++) {
2514             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2515             validatePipelineDepth(result);
2516             T resultMode = getValueNotNull(result, key);
2517             if (VERBOSE) {
2518                 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: "
2519                         + resultMode.toString());
2520             }
2521             // Capture result should be one of the expected values.
2522             mCollector.expectContains(expectedModes, resultMode);
2523         }
2524     }
2525 
2526     /**
2527      * Verify that the fps does not slow down for a given request with certain
2528      * controls applied.
2529      * <p>
2530      * This method selects the max preview size for each fps range, configures
2531      * the preview stream, and starts the preview with that size. It then
2532      * verifies that the resulting frame durations fall within the expected
2533      * frame duration range.
2534      * </p>
2535      *
2536      * @param requestBuilder The request builder that contains post-processing
2537      *            controls that could impact the output frame rate, such as
2538      *            {@link CaptureRequest#NOISE_REDUCTION_MODE}. The values of
2539      *            these controls must be set such that the frame rate is not
2540      *            slowed down.
2541      * @param numFramesVerified The number of frames to be verified
2542      */
2543     private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
2544             int numFramesVerified)  throws Exception {
2545         boolean frameDurationAvailable = true;
2546         // Allow a few frames for AE to settle on target FPS range
2547         final int NUM_FRAME_TO_SKIP = 6;
2548         float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
2549         if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
2550             frameDurationAvailable = false;
2551             // Allow a larger error margin (1.5%) for timestamps
2552             frameDurationErrorMargin = 0.015f;
2553         }
2554 
2555         Range<Integer>[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
2556         boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
2557         Range<Integer> fpsRange;
2558         SimpleCaptureCallback resultListener;
2559 
2560         for (int i = 0; i < fpsRanges.length; i += 1) {
2561             fpsRange = fpsRanges[i];
2562             Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
2563             // If unable to find a preview size, then log the failure, and skip this run.
2564             if (previewSz == null) {
2565                 if (mStaticInfo.isCapabilitySupported(
2566                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
2567                     mCollector.addMessage(String.format(
2568                             "Unable to find a preview size supporting given fps range %s",
2569                             fpsRange));
2570                 }
2571                 continue;
2572             }
2573 
2574             if (VERBOSE) {
2575                 Log.v(TAG, String.format("Test fps range %s for preview size %s",
2576                         fpsRange, previewSz.toString()));
2577             }
2578             requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
2579             // Turn off auto antibanding to avoid exposure time and frame duration interference
2580             // from antibanding algorithm.
2581             if (antiBandingOffIsSupported) {
2582                 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
2583                         CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
2584             } else {
2585                 // The device doesn't implement the OFF mode; the test continues, but it needs
2586                 // to make sure that the antibanding algorithm doesn't slow down the fps.
2587                 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
2588                         " not slow down the frame rate regardless of its current antibanding" +
2589                         " mode");
2590             }
2591 
2592             resultListener = new SimpleCaptureCallback();
2593             startPreview(requestBuilder, previewSz, resultListener);
2594             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2595             // Wait several more frames for AE to settle on target FPS range
2596             waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);
2597 
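            // Expected frame duration range in ns, derived from the fps range:
            // [1e9 / maxFps, 1e9 / minFps].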
2598             long[] frameDurationRange = new long[]{
2599                     (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
2600             long captureTime = 0, prevCaptureTime = 0;
2601             for (int j = 0; j < numFramesVerified; j++) {
2602                 long frameDuration = frameDurationRange[0];
2603                 CaptureResult result =
2604                         resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2605                 validatePipelineDepth(result);
2606                 if (frameDurationAvailable) {
2607                     frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
2608                 } else {
2609                     // if frame duration is not available, check timestamp instead
2610                     captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
2611                     if (j > 0) {
2612                         frameDuration = captureTime - prevCaptureTime;
2613                     }
2614                     prevCaptureTime = captureTime;
2615                 }
2616                 mCollector.expectInRange(
2617                         "Frame duration must be in the range of " +
2618                                 Arrays.toString(frameDurationRange),
2619                         frameDuration,
2620                         (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
2621                         (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
2622             }
2623         }
2624 
2625         mSession.stopRepeating();
2626     }
2627 
2628     /**
2629      * Validate the pipeline depth result.
2630      *
2631      * @param result The capture result to get pipeline depth data
2632      */
2633     private void validatePipelineDepth(CaptureResult result) {
2634         final byte MIN_PIPELINE_DEPTH = 1;
2635         byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
2636         Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
2637         mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
2638                 MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
2639                 maxPipelineDepth);
2640     }
2641 
2642     /**
2643      * Calculate the anti-flickering corrected exposure time.
2644      * <p>
2645      * If the input exposure time is very short (shorter than the flickering
2646      * boundary), which indicates the scene is bright and very likely outdoors,
2647      * skip the correction, as it doesn't make much sense in that case.
2648      * </p>
2649      * <p>
2650      * For a long exposure time (larger than the flickering boundary), find the
2651      * multiple of the flickering boundary that is closest to the input exposure time.
2652      * </p>
2653      *
2654      * @param flickeringMode The flickering mode
2655      * @param exposureTime The input exposureTime to be corrected
2656      * @return anti-flickering corrected exposure time
2657      */
2658     private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
2659         if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
2660             throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
2661         }
2662         long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
2663         if (flickeringMode == ANTI_FLICKERING_60HZ) {
2664             flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
2665         }
2666 
2667         if (exposureTime <= flickeringBoundary) {
2668             return exposureTime;
2669         }
2670 
2671         // Find the closest anti-flickering corrected exposure time
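        // Adding half the boundary before truncating the remainder rounds to the nearest
        // integer multiple of the flickering boundary.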
2672         long correctedExpTime = exposureTime + (flickeringBoundary / 2);
2673         correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
2674         return correctedExpTime;
2675     }
2676 
2677     /**
2678      * Update one 3A region in the capture request builder if that region is supported. Do nothing
2679      * if the specified 3A region is not supported by the camera device.
2680      * @param requestBuilder The request to be updated
2681      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2682      * @param regions The 3A regions to be set
2683      */
2684     private void update3aRegion(
2685             CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions)
2686     {
2687         int maxRegions;
2688         CaptureRequest.Key<MeteringRectangle[]> key;
2689 
2690         if (regions == null || regions.length == 0) {
2691             throw new IllegalArgumentException("Invalid input 3A region!");
2692         }
2693 
2694         switch (algoIdx) {
2695             case INDEX_ALGORITHM_AE:
2696                 maxRegions = mStaticInfo.getAeMaxRegionsChecked();
2697                 key = CaptureRequest.CONTROL_AE_REGIONS;
2698                 break;
2699             case INDEX_ALGORITHM_AWB:
2700                 maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
2701                 key = CaptureRequest.CONTROL_AWB_REGIONS;
2702                 break;
2703             case INDEX_ALGORITHM_AF:
2704                 maxRegions = mStaticInfo.getAfMaxRegionsChecked();
2705                 key = CaptureRequest.CONTROL_AF_REGIONS;
2706                 break;
2707             default:
2708                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2709         }
2710 
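        // Only set the 3A region if the device supports at least this many regions for the
        // selected algorithm; otherwise leave the request untouched.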
2711         if (maxRegions >= regions.length) {
2712             requestBuilder.set(key, regions);
2713         }
2714     }
2715 
2716     /**
2717      * Validate that one 3A region in the capture result equals the expected region if that region is
2718      * supported. Do nothing if the specified 3A region is not supported by the camera device.
2719      * @param result The capture result to be validated
2720      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2721      * @param expectRegions The 3A regions expected in capture result
2722      */
2723     private void validate3aRegion(
2724             CaptureResult result, int algoIdx, MeteringRectangle[] expectRegions)
2725     {
2726         int maxRegions;
2727         CaptureResult.Key<MeteringRectangle[]> key;
2728         MeteringRectangle[] actualRegion;
2729 
2730         switch (algoIdx) {
2731             case INDEX_ALGORITHM_AE:
2732                 maxRegions = mStaticInfo.getAeMaxRegionsChecked();
2733                 key = CaptureResult.CONTROL_AE_REGIONS;
2734                 break;
2735             case INDEX_ALGORITHM_AWB:
2736                 maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
2737                 key = CaptureResult.CONTROL_AWB_REGIONS;
2738                 break;
2739             case INDEX_ALGORITHM_AF:
2740                 maxRegions = mStaticInfo.getAfMaxRegionsChecked();
2741                 key = CaptureResult.CONTROL_AF_REGIONS;
2742                 break;
2743             default:
2744                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2745         }
2746 
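        // Skip the validation entirely if the device does not support metering regions for
        // this algorithm.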
2747         if (maxRegions > 0)
2748         {
2749             actualRegion = getValueNotNull(result, key);
2750             mCollector.expectEquals(
2751                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2752                     " does not match actual one: " + Arrays.toString(actualRegion),
2753                     expectRegions, actualRegion);
2754         }
2755     }
2756 }
2757