1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.*;
20 import static android.hardware.camera2.CameraCharacteristics.*;
21 
22 import android.graphics.Point;
23 import android.graphics.PointF;
24 import android.graphics.Rect;
25 import android.graphics.SurfaceTexture;
26 import android.hardware.cts.helpers.CameraUtils;
27 import android.hardware.camera2.CameraCharacteristics;
28 import android.hardware.camera2.CameraDevice;
29 import android.hardware.camera2.CameraMetadata;
30 import android.hardware.camera2.CaptureRequest;
31 import android.hardware.camera2.CaptureResult;
32 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
33 import android.hardware.camera2.cts.helpers.StaticMetadata;
34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
35 import android.hardware.camera2.params.BlackLevelPattern;
36 import android.hardware.camera2.params.Capability;
37 import android.hardware.camera2.params.ColorSpaceTransform;
38 import android.hardware.camera2.params.Face;
39 import android.hardware.camera2.params.LensShadingMap;
40 import android.hardware.camera2.params.MeteringRectangle;
41 import android.hardware.camera2.params.RggbChannelVector;
42 import android.hardware.camera2.params.TonemapCurve;
43 import android.hardware.camera2.TotalCaptureResult;
44 import android.media.Image;
45 import android.os.Parcel;
46 import android.util.ArraySet;
47 import android.util.Log;
48 import android.util.Range;
49 import android.util.Rational;
50 import android.util.Size;
51 import android.view.Surface;
52 
53 import java.nio.ByteBuffer;
54 import java.util.ArrayList;
55 import java.util.Arrays;
56 import java.util.List;
57 
58 import org.junit.runners.Parameterized;
59 import org.junit.runner.RunWith;
60 import org.junit.Test;
61 
62 /**
63  * <p>
64  * Basic test for camera CaptureRequest key controls.
65  * </p>
66  * <p>
67  * Several test categories are covered: manual sensor control, 3A control,
68  * manual ISP control and other per-frame control and synchronization.
69  * </p>
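 * <p>
 * Most tests below follow the same skeleton provided by {@link Camera2SurfaceViewTestCase}.
 * A rough sketch of that flow (illustrative only; details vary per test):
 * </p>
 * <pre>{@code
 * openDevice(cameraId);
 * CaptureRequest.Builder builder =
 *         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
 * // ... set the CaptureRequest keys under test ...
 * SimpleCaptureCallback listener = new SimpleCaptureCallback();
 * startPreview(builder, previewSize, listener);
 * waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
 * // ... verify the CaptureResult values ...
 * stopPreview();
 * closeDevice();
 * }</pre>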
70  */
71 
72 @RunWith(Parameterized.class)
73 public class CaptureRequestTest extends Camera2SurfaceViewTestCase {
74     private static final String TAG = "CaptureRequestTest";
75     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
76     private static final int NUM_FRAMES_VERIFIED = 15;
77     private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60;
78     /** 30ms exposure time must be supported by full capability devices. */
79     private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms
80     private static final int DEFAULT_SENSITIVITY = 100;
81     private static final int RGGB_COLOR_CHANNEL_COUNT = 4;
82     private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT;
83     private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT;
84     private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L;
85     private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms
86     private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation.
87     private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
88     private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
89     private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
90     private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
91     private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8;
92     private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
93     private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
94     private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
95     private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100;
96     private static final int NUM_PARTIAL_FRAMES_PFC = 2;
97     private static final int NUM_PARTIAL_FRAMES_NPFC = 6;
98 
99     private static final int NUM_TEST_FOCUS_DISTANCES = 10;
100     private static final int NUM_FOCUS_DISTANCES_REPEAT = 3;
101     // 5 percent error margin for calibrated device
102     private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f;
103     // 25 percent error margin for uncalibrated device
104     private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f;
105     // 10 percent error margin for approximate device
106     private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f;
107     private static final int ANTI_FLICKERING_50HZ = 1;
108     private static final int ANTI_FLICKERING_60HZ = 2;
109     // 5 percent error margin for resulting crop regions
110     private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f;
111     // 1 percent error margin for centering the crop region
112     private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f;
113     private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f;
114     private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f;
115 
116     // Linear tone mapping curve example.
117     private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f};
118     // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points.
119     private static final float[] TONEMAP_CURVE_SRGB = {
120             0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
121             0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
122             0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
123             0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
124     };
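    // For reference, the 16 control points above appear to be samples of the standard
    // sRGB opto-electronic transfer function at evenly spaced input values:
    //   out = 12.92 * in                     for in <= 0.0031308
    //   out = 1.055 * in^(1/2.4) - 0.055     otherwise
    // e.g. in = 0.0667 gives 1.055 * 0.0667^(1/2.4) - 0.055 ~= 0.2864, matching the table.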
125     private final Rational ZERO_R = new Rational(0, 1);
126     private final Rational ONE_R = new Rational(1, 1);
127 
128     private final int NUM_ALGORITHMS = 3; // AE, AWB and AF
129     private final int INDEX_ALGORITHM_AE = 0;
130     private final int INDEX_ALGORITHM_AWB = 1;
131     private final int INDEX_ALGORITHM_AF = 2;
132 
133     private enum TorchSeqState {
134         RAMPING_UP,
135         FIRED,
136         RAMPING_DOWN
137     }
138 
139     @Override
140     public void setUp() throws Exception {
141         super.setUp();
142     }
143 
144     @Override
145     public void tearDown() throws Exception {
146         super.tearDown();
147     }
148 
149     /**
150      * Test CaptureRequest settings parcelling.
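     * <p>
     * The round trip being exercised is, in essence (illustrative sketch, not the exact
     * test body below):
     * </p>
     * <pre>{@code
     * Parcel p = Parcel.obtain();
     * request.writeToParcel(p, 0);
     * p.setDataPosition(0);
     * CaptureRequest copy = CaptureRequest.CREATOR.createFromParcel(p);
     * p.recycle();
     * }</pre>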
151      */
152     @Test
153     public void testSettingsBinderParcel() throws Exception {
154         SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5);
155         Surface surface = new Surface(outputTexture);
156 
157         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
158             try {
159                 openDevice(mCameraIdsUnderTest[i]);
160                 CaptureRequest.Builder requestBuilder =
161                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
162                 requestBuilder.addTarget(surface);
163 
164                 // Check regular/default case
165                 CaptureRequest captureRequestOriginal = requestBuilder.build();
166                 Parcel p;
167                 p = Parcel.obtain();
168                 captureRequestOriginal.writeToParcel(p, 0);
169                 p.setDataPosition(0);
170                 CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
171                 assertEquals("Parcelled camera settings should match",
172                         captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
173                         Integer.valueOf(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
174                 p.recycle();
175 
176                 // Check capture request with additional physical camera settings
177                 String physicalId = Integer.toString(i + 1);
178                 ArraySet<String> physicalIds = new ArraySet<String> ();
179                 physicalIds.add(physicalId);
180 
181                 requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW,
182                         physicalIds);
183                 requestBuilder.addTarget(surface);
184                 captureRequestOriginal = requestBuilder.build();
185                 p = Parcel.obtain();
186                 captureRequestOriginal.writeToParcel(p, 0);
187                 p.setDataPosition(0);
188                 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
189                 assertEquals("Parcelled camera settings should match",
190                         captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
191                         Integer.valueOf(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
192                 p.recycle();
193 
194                 // Check various invalid cases
195                 p = Parcel.obtain();
196                 p.writeInt(-1);
197                 p.setDataPosition(0);
198                 try {
199                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
200                     fail("should get RuntimeException due to invalid number of settings");
201                 } catch (RuntimeException e) {
202                     // Expected
203                 }
204                 p.recycle();
205 
206                 p = Parcel.obtain();
207                 p.writeInt(0);
208                 p.setDataPosition(0);
209                 try {
210                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
211                     fail("should get RuntimeException due to invalid number of settings");
212                 } catch (RuntimeException e) {
213                     // Expected
214                 }
215                 p.recycle();
216 
217                 p = Parcel.obtain();
218                 p.writeInt(1);
219                 p.setDataPosition(0);
220                 try {
221                     captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
222                     fail("should get RuntimeException due to absent settings");
223                 } catch (RuntimeException e) {
224                     // Expected
225                 }
226                 p.recycle();
227             } finally {
228                 closeDevice();
229             }
230         }
231     }
232 
233     /**
234      * Test black level lock when the exposure value changes.
235      * <p>
236      * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
237      * camera device should lock the black level. Changes to certain capture
238      * parameters (such as exposure time) may require the camera device to reset
239      * its black level compensation, but as long as the requests keep the lock ON,
240      * the black level must remain locked even after the exposure values
241      * change.
242      * </p>
243      */
244     @Test
245     public void testBlackLevelLock() throws Exception {
246         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
247             try {
248                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isCapabilitySupported(
249                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
250                     continue;
251                 }
252 
253                 openDevice(mCameraIdsUnderTest[i]);
254                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
255                 CaptureRequest.Builder requestBuilder =
256                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
257 
258                 // Start with default manual exposure time, with black level being locked.
259                 requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
260                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
261 
262                 Size previewSz =
263                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
264                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
265 
266                 startPreview(requestBuilder, previewSz, listener);
267                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
268                 // No lock OFF state is allowed as the exposure is not changed.
269                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);
270 
271                 // Double the exposure time and gain, with black level still being locked.
272                 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
273                 listener = new SimpleCaptureCallback();
274                 startPreview(requestBuilder, previewSz, listener);
275                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
276                 // Allow at most one lock OFF state as the exposure is changed once.
277                 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);
278 
279                 stopPreview();
280             } finally {
281                 closeDevice();
282             }
283         }
284     }
285 
286     /**
287      * Test dynamic black/white levels if they are supported.
288      *
289      * <p>
290      * If the dynamic black and white levels are reported, verify the following:
291      *   1. The dynamic black and white levels shouldn't deviate too much from the
292      *   fixed (global) values across different sensitivities.
293      *   2. If the RAW_SENSOR and optical black regions are supported, capture RAW images and
294      *   calculate the optical black level values. The reported dynamic black level should be
295      *   close enough to the optical black level values.
296      * </p>
297      */
298     @Test
299     public void testDynamicBlackWhiteLevel() throws Exception {
300         for (String id : mCameraIdsUnderTest) {
301             try {
302                 if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) {
303                     continue;
304                 }
305                 openDevice(id);
306                 dynamicBlackWhiteLevelTestByCamera();
307             } finally {
308                 closeDevice();
309             }
310         }
311     }
312 
313     /**
314      * Basic lens shading map request test.
315      * <p>
316      * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
317      * be applied by the camera device, and an identity lens shading map
318      * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
319      * </p>
320      * <p>
321      * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
322      * will be applied by the camera device. The lens shading map data can be
323      * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
324      * </p>
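     * <p>
     * Roughly, the verification reads the per-channel gain map from each result; for
     * SHADING_MODE_OFF every gain factor is expected to be (close to) 1.0. Sketch only,
     * the local names are illustrative:
     * </p>
     * <pre>{@code
     * LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
     * float gain = map.getGainFactor(RggbChannelVector.RED, col, row); // ~1.0 when OFF
     * }</pre>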
325      */
326     @Test
327     public void testLensShadingMap() throws Exception {
328         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
329             try {
330                 StaticMetadata staticInfo = mAllStaticInfo.get(mCameraIdsUnderTest[i]);
331                 if (!staticInfo.isManualLensShadingMapSupported()) {
332                     Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
333                             " doesn't support lens shading controls, skipping test");
334                     continue;
335                 }
336 
337                 List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
338                         staticInfo.getAvailableLensShadingMapModesChecked()));
339 
340                 if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
341                     continue;
342                 }
343 
344                 openDevice(mCameraIdsUnderTest[i]);
345                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
346                 CaptureRequest.Builder requestBuilder =
347                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
348                 requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
349                         STATISTICS_LENS_SHADING_MAP_MODE_ON);
350 
351                 Size previewSz =
352                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
353                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
354                 List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
355                         mStaticInfo.getAvailableLensShadingModesChecked()));
356 
357                 // Shading map mode OFF, lensShadingMapMode ON, camera device
358                 // should output unity maps.
359                 if (lensShadingModes.contains(SHADING_MODE_OFF)) {
360                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
361                     listener = new SimpleCaptureCallback();
362                     startPreview(requestBuilder, previewSz, listener);
363                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
364                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
365                 }
366 
367                 // Shading map mode FAST, lensShadingMapMode ON, camera device
368                 // should output valid maps.
369                 if (lensShadingModes.contains(SHADING_MODE_FAST)) {
370                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);
371 
372                     listener = new SimpleCaptureCallback();
373                     startPreview(requestBuilder, previewSz, listener);
374                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
376                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
377                 }
378 
379                 // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
380                 // should output valid maps.
381                 if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
382                     requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);
383 
384                     listener = new SimpleCaptureCallback();
385                     startPreview(requestBuilder, previewSz, listener);
386                     waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
387                     verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
388                 }
389 
390                 stopPreview();
391             } finally {
392                 closeDevice();
393             }
394         }
395     }
396 
397     /**
398      * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
399      * <p>
400      * Test all available anti-banding modes, and check that the exposure time
401      * adjustment is correct.
402      * </p>
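     * <p>
     * Roughly speaking, with 50Hz anti-banding an exposure longer than one flicker
     * period should be close to a multiple of 1/100s (10ms), and with 60Hz a multiple
     * of about 1/120s (~8.33ms); see EXPOSURE_TIME_BOUNDARY_50HZ_NS and
     * EXPOSURE_TIME_BOUNDARY_60HZ_NS above.
     * </p>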
403      */
404     @Test
405     public void testAntiBandingModes() throws Exception {
406         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
407             try {
408                 // Without manual sensor control, exposure time cannot be verified
409                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isCapabilitySupported(
410                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
411                     continue;
412                 }
413 
414                 openDevice(mCameraIdsUnderTest[i]);
415                 int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
416 
417                 Size previewSz =
418                         getMaxPreviewSize(mCamera.getId(), mCameraManager,
419                         getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
420 
421                 for (int mode : modes) {
422                     antiBandingTestByMode(previewSz, mode);
423                 }
424             } finally {
425                 closeDevice();
426             }
427         }
428 
429     }
430 
431     /**
432      * Test AE mode and lock.
433      *
434      * <p>
435      * For AE lock, the exposure parameters shouldn't change while the lock is on.
436      * For AE modes, each mode should satisfy the per-frame controls defined in
437      * the API specifications.
438      * </p>
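     * <p>
     * The lock itself is a single boolean key; as a minimal sketch (variable name is
     * illustrative): {@code builder.set(CaptureRequest.CONTROL_AE_LOCK, true);}
     * </p>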
439      */
440     @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests
441     public void testAeModeAndLock() throws Exception {
442         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
443             try {
444                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
445                     Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
446                             " does not support color outputs, skipping");
447                     continue;
448                 }
449 
450                 openDevice(mCameraIdsUnderTest[i]);
451                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
452 
453                 // Update preview surface with given size for all sub-tests.
454                 updatePreviewSurface(maxPreviewSz);
455 
456                 // Test aeMode and lock
457                 int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
458                 for (int mode : aeModes) {
459                     aeModeAndLockTestByMode(mode);
460                 }
461             } finally {
462                 closeDevice();
463             }
464         }
465     }
466 
467     /** Test {@link CaptureRequest#FLASH_MODE} control.
468      * <p>
469      * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control
470      * and {@link CaptureResult#FLASH_STATE} result.
471      * </p>
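     * <p>
     * Note that {@link CaptureRequest#FLASH_MODE} (OFF, SINGLE or TORCH) only takes
     * effect when the AE mode is ON or OFF, which is why only those two AE modes are
     * exercised below. Minimal sketch (illustrative):
     * {@code builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);}
     * </p>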
472      */
473     @Test
474     public void testFlashControl() throws Exception {
475         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
476             try {
477                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
478                     Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
479                             " does not support color outputs, skipping");
480                     continue;
481                 }
482 
483                 openDevice(mCameraIdsUnderTest[i]);
484                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
485                 CaptureRequest.Builder requestBuilder =
486                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
487 
488                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
489 
490                 startPreview(requestBuilder, maxPreviewSz, listener);
491 
492                 // Flash control can only be used when the AE mode is ON or OFF.
493                 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);
494 
495                 // LEGACY won't support AE mode OFF
496                 boolean aeOffModeSupported = false;
497                 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
498                     if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
499                         aeOffModeSupported = true;
500                     }
501                 }
502                 if (aeOffModeSupported) {
503                     flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
504                 }
505 
506                 stopPreview();
507             } finally {
508                 closeDevice();
509             }
510         }
511     }
512 
513     /**
514      * Test that the flash can be successfully turned off given various initial and final
515      * AE_CONTROL modes for repeating CaptureRequests.
516      */
517     @Test
518     public void testFlashTurnOff() throws Exception {
519         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
520             try {
521                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
522                     Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
523                             " does not support color outputs, skipping");
524                     continue;
525                 }
526                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).hasFlash()) {
527                     Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
528                             " does not support flash, skipping");
529                     continue;
530                 }
531                 openDevice(mCameraIdsUnderTest[i]);
532                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
533                 CaptureRequest.Builder requestBuilder =
534                         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
535 
536                 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
537 
538                 startPreview(requestBuilder, maxPreviewSz, listener);
539                 boolean isLegacy = CameraUtils.isLegacyHAL(mCameraManager, mCameraIdsUnderTest[i]);
540                 flashTurnOffTest(listener, isLegacy,
541                         /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
542                         /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
543 
544                 flashTurnOffTest(listener, isLegacy,
545                         /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
546                         /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
547 
548                 flashTurnOffTest(listener, isLegacy,
549                         /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
550                         /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
551 
552                 stopPreview();
553             } finally {
554                 closeDevice();
555             }
556         }
557 
558     }
559 
560     /**
561      * Test face detection modes and results.
562      */
563     @Test
564     public void testFaceDetection() throws Exception {
565         for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
566             try {
567                 if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
568                     Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
569                             " does not support color outputs, skipping");
570                     continue;
571                 }
572                 openDevice(mCameraIdsUnderTest[i]);
573                 faceDetectionTestByCamera();
574             } finally {
575                 closeDevice();
576             }
577         }
578     }
579 
580     /**
581      * Test tone map modes and controls.
582      */
583     @Test
584     public void testToneMapControl() throws Exception {
585         for (String id : mCameraIdsUnderTest) {
586             try {
587                 if (!mAllStaticInfo.get(id).isManualToneMapSupported()) {
588                     Log.i(TAG, "Camera " + id +
589                             " doesn't support tone mapping controls, skipping test");
590                     continue;
591                 }
592                 openDevice(id);
593                 toneMapTestByCamera();
594             } finally {
595                 closeDevice();
596             }
597         }
598     }
599 
600     /**
601      * Test color correction modes and controls.
602      */
603     @Test
604     public void testColorCorrectionControl() throws Exception {
605         for (String id : mCameraIdsUnderTest) {
606             try {
607                 if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) {
608                     Log.i(TAG, "Camera " + id +
609                             " doesn't support color correction controls, skipping test");
610                     continue;
611                 }
612                 openDevice(id);
613                 colorCorrectionTestByCamera();
614             } finally {
615                 closeDevice();
616             }
617         }
618     }
619 
620     /**
621      * Test edge mode control for fps ranges not exceeding 30.
622      */
623     @Test
624     public void testEdgeModeControl() throws Exception {
625         for (String id : mCameraIdsUnderTest) {
626             try {
627                 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) {
628                     Log.i(TAG, "Camera " + id +
629                             " doesn't support EDGE_MODE controls, skipping test");
630                     continue;
631                 }
632 
633                 openDevice(id);
634                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
635                 edgeModesTestByCamera(fpsRanges);
636             } finally {
637                 closeDevice();
638             }
639         }
640     }
641 
642     /**
643      * Test edge mode control for fps ranges greater than 30.
644      */
645     @Test
646     public void testEdgeModeControlFastFps() throws Exception {
647         for (String id : mCameraIdsUnderTest) {
648             try {
649                 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) {
650                     Log.i(TAG, "Camera " + id +
651                             " doesn't support EDGE_MODE controls, skipping test");
652                     continue;
653                 }
654 
655                 openDevice(id);
656                 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
657                 edgeModesTestByCamera(fpsRanges);
658             } finally {
659                 closeDevice();
660             }
661         }
662 
663     }
664 
665     /**
666      * Test focus distance control.
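     * <p>
     * {@link CaptureRequest#LENS_FOCUS_DISTANCE} is expressed in diopters (1/meters),
     * with 0.0f meaning focus at infinity. The allowed error margin depends on
     * LENS_INFO_FOCUS_DISTANCE_CALIBRATION (see the FOCUS_DISTANCE_ERROR_PERCENT_*
     * constants above).
     * </p>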
667      */
668     @Test
669     public void testFocusDistanceControl() throws Exception {
670         for (String id : mCameraIdsUnderTest) {
671             try {
672                 StaticMetadata staticInfo = mAllStaticInfo.get(id);
673                 if (!staticInfo.hasFocuser()) {
674                     Log.i(TAG, "Camera " + id + " has no focuser, skipping test");
675                     continue;
676                 }
677 
678                 if (!staticInfo.isCapabilitySupported(
679                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
680                     Log.i(TAG, "Camera " + id +
681                             " does not support MANUAL_SENSOR, skipping test");
682                     continue;
683                 }
684 
685                 openDevice(id);
686                 focusDistanceTestByCamera();
687             } finally {
688                 closeDevice();
689             }
690         }
691     }
692 
693     /**
694      * Test noise reduction mode for fps ranges not exceeding 30
695      */
696     @Test
697     public void testNoiseReductionModeControl() throws Exception {
698         for (String id : mCameraIdsUnderTest) {
699             try {
700                 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) {
701                     Log.i(TAG, "Camera " + id +
702                             " doesn't support noise reduction mode, skipping test");
703                     continue;
704                 }
705 
706                 openDevice(id);
707                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
708                 noiseReductionModeTestByCamera(fpsRanges);
709             } finally {
710                 closeDevice();
711             }
712         }
713     }
714 
715     /**
716      * Test noise reduction mode for fps ranges greater than 30
717      */
718     @Test
719     public void testNoiseReductionModeControlFastFps() throws Exception {
720         for (String id : mCameraIdsUnderTest) {
721             try {
722                 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) {
723                     Log.i(TAG, "Camera " + id +
724                             " doesn't support noise reduction mode, skipping test");
725                     continue;
726                 }
727 
728                 openDevice(id);
729                 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
730                 noiseReductionModeTestByCamera(fpsRanges);
731             } finally {
732                 closeDevice();
733             }
734         }
735     }
736 
737     /**
738      * Test AWB lock control.
739      *
740      * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p>
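     * <p>
     * The gains and transform referred to here are reported per frame via
     * {@link CaptureResult#COLOR_CORRECTION_GAINS} (an {@link RggbChannelVector}) and
     * {@link CaptureResult#COLOR_CORRECTION_TRANSFORM} (a {@link ColorSpaceTransform}).
     * Locking AWB is a minimal sketch (illustrative):
     * {@code builder.set(CaptureRequest.CONTROL_AWB_LOCK, true);}
     * </p>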
741      */
742     @Test
743     public void testAwbModeAndLock() throws Exception {
744         for (String id : mCameraIdsUnderTest) {
745             try {
746                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
747                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
748                     continue;
749                 }
750                 openDevice(id);
751                 awbModeAndLockTestByCamera();
752             } finally {
753                 closeDevice();
754             }
755         }
756     }
757 
758     /**
759      * Test different AF modes.
760      */
761     @Test
762     public void testAfModes() throws Exception {
763         for (String id : mCameraIdsUnderTest) {
764             try {
765                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
766                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
767                     continue;
768                 }
769                 openDevice(id);
770                 afModeTestByCamera();
771             } finally {
772                 closeDevice();
773             }
774         }
775     }
776 
777     /**
778      * Test video and optical stabilizations.
779      */
780     @Test
781     public void testCameraStabilizations() throws Exception {
782         for (String id : mCameraIdsUnderTest) {
783             try {
784                 StaticMetadata staticInfo = mAllStaticInfo.get(id);
785                 List<Key<?>> keys = staticInfo.getCharacteristics().getKeys();
786                 if (!(keys.contains(
787                         CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ||
788                         keys.contains(
789                                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) {
790                     Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes");
791                     continue;
792                 }
793                 if (!staticInfo.isColorOutputSupported()) {
794                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
795                     continue;
796                 }
797                 openDevice(id);
798                 stabilizationTestByCamera();
799             } finally {
800                 closeDevice();
801             }
802         }
803     }
804 
805     /**
806      * Test digital zoom (center-wise and non-center-wise), and validate the returned
806      * crop regions.
807      * The max preview size is used for each camera.
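     * <p>
     * As a rough illustration of the zoom math (not the exact helper used below), a
     * centered crop region for a zoom factor {@code zoom} can be derived from the active
     * array size; {@code characteristics}, {@code builder} and {@code zoom} are
     * illustrative names:
     * </p>
     * <pre>{@code
     * Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     * int cropW = (int) (active.width() / zoom);
     * int cropH = (int) (active.height() / zoom);
     * Rect crop = new Rect(active.centerX() - cropW / 2, active.centerY() - cropH / 2,
     *         active.centerX() + cropW / 2, active.centerY() + cropH / 2);
     * builder.set(CaptureRequest.SCALER_CROP_REGION, crop);
     * }</pre>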
808      */
809     @Test
810     public void testDigitalZoom() throws Exception {
811         for (String id : mCameraIdsUnderTest) {
812             try {
813                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
814                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
815                     continue;
816                 }
817                 openDevice(id);
818                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
819                 digitalZoomTestByCamera(maxPreviewSize);
820             } finally {
821                 closeDevice();
822             }
823         }
824     }
825 
826     /**
827      * Test zoom using CONTROL_ZOOM_RATIO, validate the returned crop regions and zoom ratio.
828      * The max preview size is used for each camera.
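     * <p>
     * Unlike SCALER_CROP_REGION, CONTROL_ZOOM_RATIO expresses zoom directly as a float
     * ratio. Minimal sketch (names are illustrative):
     * </p>
     * <pre>{@code
     * Range<Float> range = characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
     * builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, Math.min(2.0f, range.getUpper()));
     * }</pre>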
829      */
830     @Test
831     public void testZoomRatio() throws Exception {
832         for (String id : mCameraIdsUnderTest) {
833             try {
834                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
835                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
836                     continue;
837                 }
838                 openDevice(id);
839                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
840                 zoomRatioTestByCamera(maxPreviewSize);
841             } finally {
842                 closeDevice();
843             }
844         }
845     }
846 
847     /**
848      * Test digital zoom and all preview size combinations.
849      * TODO: this and the above test should be moved to the preview test class.
850      */
851     @Test
852     public void testDigitalZoomPreviewCombinations() throws Exception {
853         for (String id : mCameraIdsUnderTest) {
854             try {
855                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
856                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
857                     continue;
858                 }
859                 openDevice(id);
860                 digitalZoomPreviewCombinationTestByCamera();
861             } finally {
862                 closeDevice();
863             }
864         }
865     }
866 
867     /**
868      * Test scene mode controls.
869      */
870     @Test
871     public void testSceneModes() throws Exception {
872         for (String id : mCameraIdsUnderTest) {
873             try {
874                 if (mAllStaticInfo.get(id).isSceneModeSupported()) {
875                     openDevice(id);
876                     sceneModeTestByCamera();
877                 }
878             } finally {
879                 closeDevice();
880             }
881         }
882     }
883 
884     /**
885      * Test effect mode controls.
886      */
887     @Test
888     public void testEffectModes() throws Exception {
889         for (String id : mCameraIdsUnderTest) {
890             try {
891                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
892                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
893                     continue;
894                 }
895                 openDevice(id);
896                 effectModeTestByCamera();
897             } finally {
898                 closeDevice();
899             }
900         }
901     }
902 
903     /**
904      * Test extended scene mode controls.
905      */
906     @Test
907     public void testExtendedSceneModes() throws Exception {
908         for (String id : mCameraIdsUnderTest) {
909             try {
910                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
911                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
912                     continue;
913                 }
914                 openDevice(id);
915                 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
916                 extendedSceneModeTestByCamera(fpsRanges);
917             } finally {
918                 closeDevice();
919             }
920         }
921     }
922 
923     // TODO: add 3A state machine test.
924 
925     /**
926      * Per camera dynamic black and white level test.
927      */
928     private void dynamicBlackWhiteLevelTestByCamera() throws Exception {
929         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
930         SimpleImageReaderListener imageListener = null;
931         CaptureRequest.Builder previewBuilder =
932                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
933         CaptureRequest.Builder rawBuilder = null;
934         Size previewSize =
935                 getMaxPreviewSize(mCamera.getId(), mCameraManager,
936                 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
937         Size rawSize = null;
938         boolean canCaptureBlackRaw =
939                 mStaticInfo.isCapabilitySupported(
940                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) &&
941                 mStaticInfo.isOpticalBlackRegionSupported();
942         if (canCaptureBlackRaw) {
943             // Capture RAW16, then calculate the optical black level, and use it to
944             // cross-check the reported dynamic black level.
945             rawBuilder =
946                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
947             rawSize = mStaticInfo.getRawDimensChecked();
948             imageListener = new SimpleImageReaderListener();
949             prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize,
950                     resultListener, imageListener);
951         } else {
952             startPreview(previewBuilder, previewSize, resultListener);
953         }
954 
955         // Capture a sequence of frames with different sensitivities and validate the black/white
956         // level values
957         int[] sensitivities = getSensitivityTestValues();
958         float[][] dynamicBlackLevels = new float[sensitivities.length][];
959         int[] dynamicWhiteLevels = new int[sensitivities.length];
960         float[][] opticalBlackLevels = new float[sensitivities.length][];
961         for (int i = 0; i < sensitivities.length; i++) {
962             CaptureResult result = null;
963             if (canCaptureBlackRaw) {
964                 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
965                 CaptureRequest rawRequest = rawBuilder.build();
966                 mSession.capture(rawRequest, resultListener, mHandler);
967                 result = resultListener.getCaptureResultForRequest(rawRequest,
968                         NUM_RESULTS_WAIT_TIMEOUT);
969                 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
970 
971                 // Get max (area-wise) optical black region
972                 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get(
973                         CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
974                 Rect maxRegion = opticalBlackRegions[0];
975                 for (Rect region : opticalBlackRegions) {
976                     if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) {
977                         maxRegion = region;
978                     }
979                 }
980 
981                 // Get average black pixel values in the region (region is multiple of 2x2)
982                 Image.Plane rawPlane = rawImage.getPlanes()[0];
983                 ByteBuffer rawBuffer = rawPlane.getBuffer();
984                 float[] avgBlackLevels = {0, 0, 0, 0};
985                 final int rowSize = rawPlane.getRowStride();
986                 final int bytePerPixel = rawPlane.getPixelStride();
987                 if (VERBOSE) {
988                     Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " +
989                             rawPlane.getRowStride());
990                 }
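                // Each RAW16 sample is two bytes, so values are read with getShort() at
                // offsets derived from the plane's row and pixel strides. Stepping by 2 in
                // both directions visits each 2x2 Bayer block once and accumulates the four
                // color channels into separate sums (this assumes the optical black region
                // is aligned to the 2x2 Bayer pattern).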
991                 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) {
992                     for (int col = maxRegion.left; col < maxRegion.right; col += 2) {
993                         int startOffset = row * rowSize + col * bytePerPixel;
994                         avgBlackLevels[0] += rawBuffer.getShort(startOffset);
995                         avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel);
996                         startOffset += rowSize;
997                         avgBlackLevels[2] += rawBuffer.getShort(startOffset);
998                         avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel);
999                     }
1000                 }
1001                 int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2);
1002                 for (int m = 0; m < avgBlackLevels.length; m++) {
1003                     avgBlackLevels[m] /= numBlackBlocks;
1004                 }
1005                 opticalBlackLevels[i] = avgBlackLevels;
1006 
1007                 if (VERBOSE) {
1008                     Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s",
1009                             sensitivities[i], Arrays.toString(avgBlackLevels)));
1010                 }
1011 
1012                 rawImage.close();
1013             } else {
1014                 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
1015                 CaptureRequest previewRequest = previewBuilder.build();
1016                 mSession.capture(previewRequest, resultListener, mHandler);
1017                 result = resultListener.getCaptureResultForRequest(previewRequest,
1018                         NUM_RESULTS_WAIT_TIMEOUT);
1019             }
1020 
1021             dynamicBlackLevels[i] = getValueNotNull(result,
1022                     CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
1023             dynamicWhiteLevels[i] = getValueNotNull(result,
1024                     CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
1025         }
1026 
1027         if (VERBOSE) {
1028             Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities));
1029             Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels));
1030             Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels));
1031             if (canCaptureBlackRaw) {
1032                 Log.v(TAG, "Optical black level results " +
1033                         Arrays.deepToString(opticalBlackLevels));
1034             }
1035         }
1036 
1037         // check the dynamic black level against global black level.
1038         // Implicit guarantee: if the dynamic black level is supported, fixed black level must be
1039         // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions).
1040         BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get(
1041                 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
1042         int[] fixedBlackLevels = new int[4];
1043         int fixedWhiteLevel = mStaticInfo.getCharacteristics().get(
1044                 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
1045         blackPattern.copyTo(fixedBlackLevels, 0);
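        // copyTo() fills the four per-channel black level offsets of the 2x2 CFA pattern
        // in row-major order; channel 0 is used below as a representative scale when
        // computing the allowed deviation threshold.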
1046         float maxBlackDeviation = 0;
1047         int maxWhiteDeviation = 0;
1048         for (int i = 0; i < dynamicBlackLevels.length; i++) {
1049             for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
1050                 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) {
1051                     maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]);
1052                 }
1053             }
1054             if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) {
1055                 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel);
1056             }
1057         }
1058         mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level"
1059                 + " exceeds the threshold."
1060                 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels),
1061                 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation);
1062         mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceeds the threshold."
1063                 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels),
1064                 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN,
1065                 (float)maxWhiteDeviation);
1066 
1067         // Validate against the optical black levels if they are available
1068         if (canCaptureBlackRaw) {
1069             maxBlackDeviation = 0;
1070             for (int i = 0; i < dynamicBlackLevels.length; i++) {
1071                 for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
1072                     if (maxBlackDeviation <
1073                             Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) {
1074                         maxBlackDeviation =
1075                                 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]);
1076                     }
1077                 }
1078             }
1079 
1080             mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black"
1081                     + " exceeds the threshold."
1082                     + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)
1083                     + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels),
1084                     fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN,
1085                     maxBlackDeviation);
1086         }
1087     }
1088 
1089     private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
1090         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1091         CaptureRequest.Builder requestBuilder =
1092                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1093         int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked();
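        // Depending on the device, the available modes may include OFF, FAST, HIGH_QUALITY,
        // MINIMAL and ZERO_SHUTTER_LAG; each advertised mode is exercised in turn below.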
1094 
1095         for (int mode : availableModes) {
1096             requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode);
1097 
1098             // Verify that the OFF and FAST modes do not slow down the frame rate.
1099             if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF ||
1100                     mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) {
1101                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
1102             }
1103 
1104             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1105             startPreview(requestBuilder, maxPrevSize, resultListener);
1106             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1107             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1108 
1109             verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode,
1110                     resultListener, NUM_FRAMES_VERIFIED);
1111         }
1112 
1113         stopPreview();
1114     }
1115 
1116     private void focusDistanceTestByCamera() throws Exception {
1117         CaptureRequest.Builder requestBuilder =
1118                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1119         requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
1120         int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked();
1121         float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED;
1122         if (calibrationStatus ==
1123                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) {
1124             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED;
1125         } else if (calibrationStatus ==
1126                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) {
1127             errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE;
1128         }
1129 
1130         // Test changing focus distance with repeating request
1131         focusDistanceTestRepeating(requestBuilder, errorMargin);
1132 
1133         if (calibrationStatus ==
1134                 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED)  {
1135             // Test changing focus distance with burst request
1136             focusDistanceTestBurst(requestBuilder, errorMargin);
1137         }
1138     }
1139 
1140     private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder,
1141             float errorMargin) throws Exception {
1142         CaptureRequest request;
1143         float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0);
1144         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1145         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1146         startPreview(requestBuilder, maxPrevSize, resultListener);
1147 
1148         float[] resultDistances = new float[testDistances.length];
1149         int[] resultLensStates = new int[testDistances.length];
1150 
1151         // Collect results
1152         for (int i = 0; i < testDistances.length; i++) {
1153             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
1154             request = requestBuilder.build();
1155             resultListener = new SimpleCaptureCallback();
1156             mSession.setRepeatingRequest(request, resultListener, mHandler);
1157             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1158             waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1159                     CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1160             CaptureResult result = resultListener.getCaptureResultForRequest(request,
1161                     NUM_RESULTS_WAIT_TIMEOUT);
1162 
1163             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1164             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
1165 
1166             if (VERBOSE) {
1167                 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i]
1168                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
1169             }
1170         }
1171 
1172         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
1173                 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0,
1174                 errorMargin);
1175 
1176         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1177 
1178             // Test hyperfocal distance optionally
1179             float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1180             if (hyperFocalDistance > 0) {
1181                 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance);
1182                 request = requestBuilder.build();
1183                 resultListener = new SimpleCaptureCallback();
1184                 mSession.setRepeatingRequest(request, resultListener, mHandler);
1185                 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1186 
1187                 // Then wait for the lens.state to be stationary.
1188                 waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1189                         CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1190                 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1191                 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1192                 mCollector.expectInRange("Focus distance for hyperfocal should be close enough to" +
1193                         " requested value", focusDistance,
1194                         hyperFocalDistance * (1.0f - errorMargin),
1195                         hyperFocalDistance * (1.0f + errorMargin));
1196             }
1197         }
1198     }
1199 
1200     private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder,
1201             float errorMargin) throws Exception {
1202 
1203         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1204         float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT,
1205                 NUM_FOCUS_DISTANCES_REPEAT);
1206         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1207         startPreview(requestBuilder, maxPrevSize, resultListener);
1208 
1209         float[] resultDistances = new float[testDistances.length];
1210         int[] resultLensStates = new int[testDistances.length];
1211 
1212         final int maxPipelineDepth = mStaticInfo.getCharacteristics().get(
1213             CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
1214 
1215         // Move lens to starting position, and wait for the lens.state to be stationary.
1216         CaptureRequest request;
1217         requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]);
1218         request = requestBuilder.build();
1219         mSession.setRepeatingRequest(request, resultListener, mHandler);
1220         waitForResultValue(resultListener, CaptureResult.LENS_STATE,
1221                 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
1222 
1223         // Submit burst of requests with different focus distances
1224         List<CaptureRequest> burst = new ArrayList<>();
1225         for (int i = 0; i < testDistances.length; i ++) {
1226             requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
1227             burst.add(requestBuilder.build());
1228         }
1229         mSession.captureBurst(burst, resultListener, mHandler);
1230 
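        // Collect the result for each burst request; allow up to maxPipelineDepth + 1 results
        // for each one to show up.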
1231         for (int i = 0; i < testDistances.length; i++) {
1232             CaptureResult result = resultListener.getCaptureResultForRequest(
1233                     burst.get(i), maxPipelineDepth+1);
1234 
1235             resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
1236             resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);
1237 
1238             if (VERBOSE) {
1239                 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i]
1240                         + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
1241             }
1242         }
1243 
1244         verifyFocusDistance(testDistances, resultDistances, resultLensStates,
1245                 /*ascendingOrder*/true, /*noOvershoot*/true,
1246                 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT,
1247                 errorMargin);
1248 
1249     }
1250 
1251     /**
1252      * Verify focus distance control.
1253      *
1254      * Assumption:
1255      * - First repeatStart+1 elements of requestedDistances share the same value
1256      * - Last repeatEnd+1 elements of requestedDistances share the same value
1257      * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder.
1258      * - Focuser is at requestedDistances[0] at the beginning of the test.
1259      *
1260      * @param requestedDistances The requested focus distances
1261      * @param resultDistances The result focus distances
1262      * @param lensStates The result lens states
1263      * @param ascendingOrder The order of the expected focus distance request/output
1264      * @param noOvershoot Assert that focus control doesn't overshoot the requested value
1265      * @param repeatStart The number of times the starting focus distance is repeated
1266      * @param repeatEnd The number of times the ending focus distance is repeated
1267      * @param errorMargin The error margin between request and result
1268      */
1269     private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances,
1270             int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart,
1271             int repeatEnd, float errorMargin) {
1272 
1273         float minValue = 0;
1274         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
1275         float hyperfocalDistance = 0;
1276         if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
1277             hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
1278         }
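        // All reported focus distances must fall within [0, the device's minimum focus distance].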
1279 
1280         // Verify lens and focus distance do not change for first repeatStart
1281         // results.
1282         for (int i = 0; i < repeatStart; i ++) {
1283             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1284             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1285             float marginMax =
1286                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1287 
1288             mCollector.expectEquals("Lens moves even though focus_distance didn't change",
1289                     lensStates[i], CaptureResult.LENS_STATE_STATIONARY);
1290             if (noOvershoot) {
1291                 mCollector.expectInRange("Focus distance in result should be close enough to " +
1292                         "requested value", resultDistances[i], marginMin, marginMax);
1293             }
1294             mCollector.expectInRange("Result focus distance is out of range",
1295                     resultDistances[i], minValue, maxValue);
1296         }
1297 
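        // For the varying portion of the sequence, optionally check for overshoot and verify that
        // adjacent results change monotonically in the requested direction.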
1298         for (int i = repeatStart; i < resultDistances.length-1; i ++) {
1299             float marginMin = requestedDistances[i] * (1.0f - errorMargin);
1300             // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
1301             float marginMax =
1302                     Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
1303             if (noOvershoot) {
1304                 // Result focus distance shouldn't overshoot the request
1305                 boolean condition;
1306                 if (ascendingOrder) {
1307                     condition = resultDistances[i] <= marginMax;
1308                 } else {
1309                     condition = resultDistances[i] >= marginMin;
1310                 }
1311                 mCollector.expectTrue(String.format(
1312                       "Lens shouldn't move past requested focus distance. result " +
1313                       resultDistances[i] + " vs target of " +
1314                       (ascendingOrder ? marginMax : marginMin)), condition);
1315             }
1316 
1317             // Verify that focus distance changes monotonically between adjacent results
1318             boolean condition;
1319             float compareDistance = resultDistances[i+1] - resultDistances[i];
1320             if (i < resultDistances.length-1-repeatEnd) {
1321                 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0);
1322             } else {
1323                 condition = (ascendingOrder ? compareDistance >= 0 : compareDistance <= 0);
1324             }
1325             mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
1326                   + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
1327                   + lensStates[i+1] + "] monotonicity is broken"), condition);
1328         }
1329 
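        // The lens must have actually moved across the sequence: the first and last result
        // distances must differ.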
1330         mCollector.expectTrue(String.format("All values of this array are equal: " +
1331                 resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
1332                 resultDistances[0] != resultDistances[resultDistances.length-1]);
1333 
1334         // Verify lens moved to destination location.
1335         mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
1336                 " for minFocusDistance should be close enough to requested value " +
1337                 requestedDistances[requestedDistances.length-1],
1338                 resultDistances[resultDistances.length-1],
1339                 requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
1340                 requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
1341     }
1342 
1343     /**
1344      * Verify edge mode control results for fpsRanges
1345      */
1346     private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
1347         Size maxPrevSize = mOrderedPreviewSizes.get(0);
1348         int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
1349         CaptureRequest.Builder requestBuilder =
1350                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1351 
1352         for (int mode : edgeModes) {
1353             requestBuilder.set(CaptureRequest.EDGE_MODE, mode);
1354 
1355             // Test that OFF and FAST modes do not slow down the frame rate.
1356             if (mode == CaptureRequest.EDGE_MODE_OFF ||
1357                     mode == CaptureRequest.EDGE_MODE_FAST) {
1358                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
1359             }
1360 
1361             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1362             startPreview(requestBuilder, maxPrevSize, resultListener);
1363             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1364             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1365 
1366             verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
1367                     NUM_FRAMES_VERIFIED);
1368         }
1369 
1370         stopPreview();
1371     }
1372 
1373     /**
1374      * Test color correction controls.
1375      *
1376      * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
1377      * the unit gain and identity transform.</p>
1378      */
1379     private void colorCorrectionTestByCamera() throws Exception {
1380         CaptureRequest request;
1381         CaptureResult result;
1382         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1383         updatePreviewSurface(maxPreviewSz);
1384         CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
1385         CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
1386         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1387 
1388         startPreview(previewRequestBuilder, maxPreviewSz, listener);
1389 
1390         // Default preview result should give valid color correction metadata.
1391         result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1392         validateColorCorrectionResult(result,
1393                 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
1394         int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
1395         // TRANSFORM_MATRIX mode
1396         // Only test unit gain and identity transform
1397         List<Integer> availableControlModes = Arrays.asList(
1398                 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked()));
1399         List<Integer> availableAwbModes = Arrays.asList(
1400                 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked()));
1401         boolean isManualCCSupported =
1402                 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) ||
1403                 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF);
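        // Manual color correction is testable when either full manual 3A (CONTROL_MODE_OFF) or
        // manual AWB (CONTROL_AWB_MODE_OFF) is available.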
1404         if (isManualCCSupported) {
1405             if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) {
1406                 // Only manual AWB mode is supported
1407                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1408                         CaptureRequest.CONTROL_MODE_AUTO);
1409                 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1410                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1411             } else {
1412                 // All 3A manual controls are supported; it doesn't matter what we set for AWB mode.
1413                 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
1414                         CaptureRequest.CONTROL_MODE_OFF);
1415             }
1416 
1417             RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);
1418 
1419             ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
1420                 new Rational[] {
1421                     ONE_R, ZERO_R, ZERO_R,
1422                     ZERO_R, ONE_R, ZERO_R,
1423                     ZERO_R, ZERO_R, ONE_R
1424                 });
1425 
1426             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1427             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
1428             manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
1429             request = manualRequestBuilder.build();
1430             mSession.capture(request, listener, mHandler);
1431             result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1432             RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
1433             ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
1434             validateColorCorrectionResult(result, colorCorrectionMode);
1435             mCollector.expectEquals("control mode result/request mismatch",
1436                     CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
1437             mCollector.expectEquals("Color correction gain result/request mismatch",
1438                     UNIT_GAIN, gains);
1439             mCollector.expectEquals("Color correction transform result/request mismatch",
1440                     IDENTITY_TRANSFORM, transform);
1441 
1442         }
1443 
1444         // FAST mode
1445         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
1446         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1447         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1448         request = manualRequestBuilder.build();
1449         mSession.capture(request, listener, mHandler);
1450         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1451         validateColorCorrectionResult(result, colorCorrectionMode);
1452         mCollector.expectEquals("control mode result/request mismatch",
1453                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1454 
1455         // HIGH_QUALITY mode
1456         colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
1457         manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1458         manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
1459         request = manualRequestBuilder.build();
1460         mSession.capture(request, listener, mHandler);
1461         result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
1462         validateColorCorrectionResult(result, colorCorrectionMode);
1463         mCollector.expectEquals("control mode result/request mismatch",
1464                 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
1465     }
1466 
1467     private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
1468         final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
1469         final int TRANSFORM_SIZE = 9;
1470         Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
1471         Arrays.fill(zeroTransform, ZERO_R);
1472         final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);
1473 
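        // The result must report non-null gains and transform, and neither may be the all-zero
        // placeholder defined above.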
1474         RggbChannelVector resultGain;
1475         if ((resultGain = mCollector.expectKeyValueNotNull(result,
1476                 CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
1477             mCollector.expectKeyValueNotEquals(result,
1478                     CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS);
1479         }
1480 
1481         ColorSpaceTransform resultTransform;
1482         if ((resultTransform = mCollector.expectKeyValueNotNull(result,
1483                 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) {
1484             mCollector.expectKeyValueNotEquals(result,
1485                     CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM);
1486         }
1487 
1488         mCollector.expectEquals("color correction mode result/request mismatch",
1489                 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE));
1490     }
1491 
1492     /**
1493      * Test that flash can be turned off successfully with a given initial and final AE_CONTROL
1494      * states.
1495      *
1496      * This function expects that initialAeControl and flashOffAeControl will not be either
1497      * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF
1498      *
1499      * @param listener The Capture listener that is used to wait for capture result
1500      * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with.
1501      * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for
1502      *        TEMPLATE_PREVIEW repeating requests.
1503      */
1504     private void flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy,
1505             int initialAeControl, int flashOffAeControl) throws Exception {
1506         CaptureResult result;
1507         final int NUM_FLASH_REQUESTS_TESTED = 10;
1508         CaptureRequest.Builder requestBuilder = createRequestForPreview();
1509         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
1510         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl);
1511 
1512         mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1513         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1514 
1515         // Turn on torch using FLASH_MODE_TORCH
1516         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1517         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1518         CaptureRequest torchOnRequest = requestBuilder.build();
1519         mSession.setRepeatingRequest(torchOnRequest, listener, mHandler);
1520         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH);
1521         result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT);
1522         // Test that the flash actually turned on continuously.
1523         mCollector.expectEquals("Flash state result must be FIRED", CaptureResult.FLASH_STATE_FIRED,
1524                 result.get(CaptureResult.FLASH_STATE));
1525         mSession.stopRepeating();
1526         // Turn off the torch
1527         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl);
1528         // TODO: jchowdhary@, b/130323585, this line can be removed.
1529         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1530         int numAllowedTransitionStates = NUM_PARTIAL_FRAMES_NPFC;
1531         if (mStaticInfo.isPerFrameControlSupported()) {
1532             numAllowedTransitionStates = NUM_PARTIAL_FRAMES_PFC;
1533 
1534         }
1535         // We submit 2 * numAllowedTransitionStates + 1 requests since we have two torch mode
1536         // transitions. The additional request is to check for at least 1 expected (FIRED / READY)
1537         // state.
1538         int numTorchTestSamples = 2 * numAllowedTransitionStates + 1;
1539         CaptureRequest flashOffRequest = requestBuilder.build();
1540         int flashModeOffRequests = captureRequestsSynchronizedBurst(flashOffRequest,
1541                 numTorchTestSamples, listener, mHandler);
1542         // Turn it on again.
1543         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1544         // We need to have CONTROL_AE_MODE be either CONTROL_AE_MODE_ON or CONTROL_AE_MODE_OFF to
1545         // turn the torch on again.
1546         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1547         CaptureRequest flashModeTorchRequest = requestBuilder.build();
1548         int flashModeTorchRequests = captureRequestsSynchronizedBurst(flashModeTorchRequest,
1549                 numTorchTestSamples, listener, mHandler);
1550 
1551         CaptureResult[] torchStateResults =
1552                 new CaptureResult[flashModeTorchRequests + flashModeOffRequests];
1553         Arrays.fill(torchStateResults, null);
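        // Drain the results in submission order: first the flash-off burst, then the torch burst.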
1554         int i = 0;
1555         for (; i < flashModeOffRequests; i++) {
1556             torchStateResults[i] =
1557                     listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
1558             mCollector.expectNotEquals("Result for flashModeOff request null",
1559                     torchStateResults[i], null);
1560         }
1561         for (int j = i; j < torchStateResults.length; j++) {
1562             torchStateResults[j] =
1563                     listener.getCaptureResultForRequest(flashModeTorchRequest,
1564                             NUM_RESULTS_WAIT_TIMEOUT);
1565             mCollector.expectNotEquals("Result for flashModeTorch request null",
1566                     torchStateResults[j], null);
1567         }
1568         if (isLegacy) {
1569             // For LEGACY devices, flash state is null for all situations except:
1570             // android.control.aeMode == ON_ALWAYS_FLASH, where flash.state will be FIRED
1571             // android.flash.mode == TORCH, where flash.state will be FIRED
1572             testLegacyTorchStates(torchStateResults, 0, flashModeOffRequests - 1, flashOffRequest);
1573             testLegacyTorchStates(torchStateResults, flashModeOffRequests,
1574                     torchStateResults.length -1,
1575                     flashModeTorchRequest);
1576         } else {
1577             checkTorchStates(torchStateResults, numAllowedTransitionStates, flashModeOffRequests,
1578                     flashModeTorchRequests);
1579         }
1580     }
1581 
1582     private void testLegacyTorchStates(CaptureResult[] torchStateResults, int beg, int end,
1583             CaptureRequest request) {
1584         for (int i = beg; i <= end; i++) {
1585             Integer requestControlAeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
1586             Integer requestFlashMode = request.get(CaptureRequest.FLASH_MODE);
1587             Integer resultFlashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1588             if (requestControlAeMode == CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
1589                     requestFlashMode == CaptureRequest.FLASH_MODE_TORCH) {
1590                 mCollector.expectEquals("For LEGACY devices, flash state must be FIRED when " +
1591                         "CONTROL_AE_MODE == CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE == " +
1592                         "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1593                         requestFlashMode, CaptureResult.FLASH_STATE_FIRED, resultFlashState);
1594                 continue;
1595             }
1596             mCollector.expectTrue("For LEGACY devices, flash state must be null when " +
1597                         "CONTROL_AE_MODE != CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE != " +
1598                         "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1599                         requestFlashMode,  resultFlashState == null);
1600         }
1601     }
1602     // We check that torch states appear in the order expected. We don't necessarily know how many
1603     // times each state might appear, but we make sure that the states do not appear out of
1604     // order.
1605     private void checkTorchTransitionStates(CaptureResult[] torchStateResults, int beg, int end,
1606             List<Integer> stateOrder, boolean isTurningOff) {
1607         Integer flashState;
1608         Integer curIndex = 0;
1609         for (int i = beg; i <= end; i++) {
1610             flashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1611             int index = stateOrder.indexOf(flashState);
1612             mCollector.expectNotEquals("Invalid state " + flashState + " not in expected list " +
1613                     stateOrder, index, -1);
1614             mCollector.expectGreaterOrEqual("state " + flashState  + " index " + index +
1615                     " is expected to be >= " + curIndex,
1616                     curIndex, index);
1617             curIndex = index;
1618         }
1619     }
1620 
1621     private void checkTorchStates(CaptureResult[] torchResults, int numAllowedTransitionStates,
1622             int numTorchOffSamples, int numTorchOnSamples) {
1623         // We test for flash states from request:
1624         // Request:       O(0) O(1) O(2) O(n)....O(nOFF) T(0) T(1) T(2) ....T(n) .... T(nON)
1625         // Valid Result : P/R  P/R  P/R  R R R...P/R P/R   P/F  P/F  P/F      F         F
1626         // For the FLASH_MODE_OFF requests, once FLASH_STATE_READY has been seen, for the
1627         // transition states while switching the torch off, it must not transition to
1628         // FLASH_STATE_PARTIAL again till the next transition period which turns the torch on.
1629         // P - FLASH_STATE_PARTIAL
1630         // R - FLASH_STATE_READY
1631         // F - FLASH_STATE_FIRED
1632         // O(k) - kth FLASH_MODE_OFF request
1633         // T(k) - kth FLASH_MODE_TORCH request
1634         // nOFF - number of torch off samples
1635         // nON - number of torch on samples
1636         Integer flashState;
1637         // Check on -> off transition states
1638         List<Integer> onToOffStateOrderList = new ArrayList<Integer>();
1639         onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1640         onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
1641         checkTorchTransitionStates(torchResults, 0, numAllowedTransitionStates,
1642                 onToOffStateOrderList, true);
1643         // The next frames (before transition) must have their flash state as FLASH_STATE_READY
1644         for (int i = numAllowedTransitionStates + 1;
1645                 i < numTorchOffSamples - numAllowedTransitionStates; i++) {
1646             flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
1647             mCollector.expectEquals("flash state result must be READY",
1648                     CaptureResult.FLASH_STATE_READY, flashState);
1649         }
1650         // check off -> on transition states, before the FLASH_MODE_TORCH request was sent
1651         List<Integer> offToOnPreStateOrderList = new ArrayList<Integer>();
1652         offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
1653         offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1654         checkTorchTransitionStates(torchResults,
1655                 numTorchOffSamples - numAllowedTransitionStates, numTorchOffSamples - 1,
1656                 offToOnPreStateOrderList, false);
1657         // check off -> on transition states
1658         List<Integer> offToOnPostStateOrderList = new ArrayList<Integer>();
1659         offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1660         offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_FIRED);
1661         checkTorchTransitionStates(torchResults,
1662                 numTorchOffSamples, numTorchOffSamples + numAllowedTransitionStates,
1663                 offToOnPostStateOrderList, false);
1664         // check on states after off -> on transition
1665         // The next frames must have their flash state as FLASH_STATE_FIRED
1666         for (int i = numTorchOffSamples + numAllowedTransitionStates + 1;
1667                 i < torchResults.length - 1; i++) {
1668             flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
1669             mCollector.expectEquals("flash state result must be FIRED for frame " + i,
1670                     CaptureRequest.FLASH_STATE_FIRED, flashState);
1671         }
1672     }
1673 
1674     /**
1675      * Test flash mode control by AE mode.
1676      * <p>
1677      * Only allow AE mode ON or OFF, because other AE modes could conflict with manual
1678      * flash control. This function expects the camera to already have an active
1679      * repeating request and be sending results to the listener.
1680      * </p>
1681      *
1682      * @param listener The Capture listener that is used to wait for capture result
1683      * @param aeMode The AE mode for flash to test with
1684      */
1685     private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception {
1686         CaptureResult result;
1687         final int NUM_FLASH_REQUESTS_TESTED = 10;
1688         CaptureRequest.Builder requestBuilder = createRequestForPreview();
1689 
1690         if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) {
1691             requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
1692         } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
1693             changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
1694         } else {
1695             throw new IllegalArgumentException("This test only works when AE mode is ON or OFF");
1696         }
1697 
1698         mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
1699         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1700 
1701         // For cameras that don't have a flash unit, the flash state must always be UNAVAILABLE.
1702         if (!mStaticInfo.getFlashInfoChecked()) {
1703             for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1704                 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1705                 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE"
1706                         + " for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE,
1707                         result.get(CaptureResult.FLASH_STATE));
1708             }
1709 
1710             return;
1711         }
1712 
1713         // Test flash SINGLE mode control. Wait for flash state to be READY first.
1714         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
1715             waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY,
1716                     NUM_RESULTS_WAIT_TIMEOUT);
1717         } // else the settings were already waited on earlier
1718 
1719         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
1720         CaptureRequest flashSingleRequest = requestBuilder.build();
1721 
1722         int flashModeSingleRequests = captureRequestsSynchronized(
1723                 flashSingleRequest, listener, mHandler);
1724         waitForNumResults(listener, flashModeSingleRequests - 1);
1725         result = listener.getCaptureResultForRequest(flashSingleRequest, NUM_RESULTS_WAIT_TIMEOUT);
1726         // Result mode must be SINGLE, state must be FIRED.
1727         mCollector.expectEquals("Flash mode result must be SINGLE",
1728                 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE));
1729         mCollector.expectEquals("Flash state result must be FIRED",
1730                 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1731 
1732         // Test flash TORCH mode control.
1733         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
1734         CaptureRequest torchRequest = requestBuilder.build();
1735 
1736         int flashModeTorchRequests = captureRequestsSynchronized(torchRequest,
1737                 NUM_FLASH_REQUESTS_TESTED, listener, mHandler);
1738         waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED);
1739 
1740         // Verify the results
1741         TorchSeqState state = TorchSeqState.RAMPING_UP;
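        // Track the torch sequence: RAMPING_UP until FIRED is seen, then RAMPING_DOWN once a
        // PARTIAL state follows FIRED.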
1742         for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
1743             result = listener.getCaptureResultForRequest(torchRequest,
1744                     NUM_RESULTS_WAIT_TIMEOUT);
1745             int flashMode = result.get(CaptureResult.FLASH_MODE);
1746             int flashState = result.get(CaptureResult.FLASH_STATE);
1747             // Result mode must be TORCH
1748             mCollector.expectEquals("Flash mode result " + i + " must be TORCH",
1749                     CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE));
1750             if (state == TorchSeqState.RAMPING_UP &&
1751                     flashState == CaptureResult.FLASH_STATE_FIRED) {
1752                 state = TorchSeqState.FIRED;
1753             } else if (state == TorchSeqState.FIRED &&
1754                     flashState == CaptureResult.FLASH_STATE_PARTIAL) {
1755                 state = TorchSeqState.RAMPING_DOWN;
1756             }
1757 
1758             if (i == 0 && mStaticInfo.isPerFrameControlSupported()) {
1759                 mCollector.expectTrue(
1760                         "Per frame control device must enter FIRED state on first torch request",
1761                         state == TorchSeqState.FIRED);
1762             }
1763 
1764             if (state == TorchSeqState.FIRED) {
1765                 mCollector.expectEquals("Flash state result " + i + " must be FIRED",
1766                         CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));
1767             } else {
1768                 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL",
1769                         CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE));
1770             }
1771         }
1772         mCollector.expectTrue("Torch state FIRED never seen",
1773                 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN);
1774 
1775         // Test flash OFF mode control
1776         requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1777         CaptureRequest flashOffRequest = requestBuilder.build();
1778 
1779         int flashModeOffRequests = captureRequestsSynchronized(flashOffRequest, listener, mHandler);
1780         waitForNumResults(listener, flashModeOffRequests - 1);
1781         result = listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
1782         mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF,
1783                 result.get(CaptureResult.FLASH_MODE));
1784     }
1785 
1786     private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified,
1787             int mode, boolean isAeManual, long requestExpTime) throws Exception {
1788         // Skip the first couple of frames, as antibanding may not be fully up yet.
1789         final int NUM_FRAMES_SKIPPED = 5;
1790         for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) {
1791             listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1792         }
1793 
1794         for (int i = 0; i < numFramesVerified; i++) {
1795             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1796             Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
1797             assertNotNull("Exposure time shouldn't be null", resultExpTime);
1798             Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
1799             // Scene flicker result should always be available.
1800             assertNotNull("Scene flicker must not be null", flicker);
1801             assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE &&
1802                     flicker <= STATISTICS_SCENE_FLICKER_60HZ);
1803 
1804             Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE);
1805             assertNotNull("antiBanding mode shouldn't be null", antiBandMode);
1806             assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode,
1807                     antiBandMode == mode);
1808             if (isAeManual) {
1809                 // Exposure time may be rounded down (not up), and only needs to be close enough.
1810                 validateExposureTime(requestExpTime, resultExpTime);
1811                 return;
1812             }
1813 
1814             long expectedExpTime = resultExpTime; // Default, no exposure adjustment.
1815             if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) {
1816                 // result exposure time must be adjusted by 50Hz illuminant source.
1817                 expectedExpTime =
1818                         getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1819             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) {
1820                 // result exposure time must be adjusted by 60Hz illuminant source.
1821                 expectedExpTime =
1822                         getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1823             } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){
1824                 /**
1825                  * Use STATISTICS_SCENE_FLICKER to tell the illuminant source
1826                  * and do the exposure adjustment.
1827                  */
1828                 expectedExpTime = resultExpTime;
1829                 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) {
1830                     expectedExpTime =
1831                             getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime);
1832                 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) {
1833                     expectedExpTime =
1834                             getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime);
1835                 }
1836             }
1837 
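            // The result exposure time must match the anti-flickering-adjusted expectation within
            // the allowed margin.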
1838             if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) {
1839                 mCollector.addMessage(String.format("Result exposure time %dns diverges too much"
1840                         + " from expected exposure time %dns for mode %d when AE is auto",
1841                         resultExpTime, expectedExpTime, mode));
1842             }
1843         }
1844     }
1845 
1846     private void antiBandingTestByMode(Size size, int mode)
1847             throws Exception {
1848         if (VERBOSE) {
1849             Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId());
1850         }
1851         CaptureRequest.Builder requestBuilder =
1852                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1853 
1854         requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode);
1855 
1856         // Test auto AE mode anti-banding behavior
1857         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1858         startPreview(requestBuilder, size, resultListener);
1859         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1860         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false,
1861                 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK);
1862 
1863         // Test manual AE mode anti-banding behavior
1864         // 65ms, must be supported by full capability devices.
1865         final long TEST_MANUAL_EXP_TIME_NS = 65000000L;
1866         long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS);
1867         changeExposure(requestBuilder, manualExpTime);
1868         resultListener = new SimpleCaptureCallback();
1869         startPreview(requestBuilder, size, resultListener);
1870         waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1871         verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true,
1872                 manualExpTime);
1873 
1874         stopPreview();
1875     }
1876 
1877     /**
1878      * Test all available AE modes and AE lock.
1879      * <p>
1880      * For manual AE mode, test iterates through different sensitivities and
1881      * exposure times, validate the result exposure time correctness. For
1882      * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested.
1883      * For the rest of the AUTO modes, AE lock is tested.
1884      * </p>
1885      *
1886      * @param mode The AE mode to be tested
1887      */
1888     private void aeModeAndLockTestByMode(int mode)
1889             throws Exception {
1890         switch (mode) {
1891             case CONTROL_AE_MODE_OFF:
1892                 if (mStaticInfo.isCapabilitySupported(
1893                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
1894                     // Test manual exposure control.
1895                     aeManualControlTest();
1896                 } else {
1897                     Log.w(TAG,
1898                             "aeModeAndLockTestByMode - can't test AE mode OFF without " +
1899                             "manual sensor control");
1900                 }
1901                 break;
1902             case CONTROL_AE_MODE_ON:
1903             case CONTROL_AE_MODE_ON_AUTO_FLASH:
1904             case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
1905             case CONTROL_AE_MODE_ON_ALWAYS_FLASH:
1906             case CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
1907                 // Test AE lock for above AUTO modes.
1908                 aeAutoModeTestLock(mode);
1909                 break;
1910             default:
1911                 throw new UnsupportedOperationException("Unhandled AE mode " + mode);
1912         }
1913     }
1914 
1915     /**
1916      * Test AE auto modes.
1917      * <p>
1918      * Use single requests rather than a repeating request to test AE lock per frame control.
1919      * </p>
1920      */
1921     private void aeAutoModeTestLock(int mode) throws Exception {
1922         CaptureRequest.Builder requestBuilder =
1923                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1924         if (mStaticInfo.isAeLockSupported()) {
1925             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1926         }
1927         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode);
1928         configurePreviewOutput(requestBuilder);
1929 
1930         final int MAX_NUM_CAPTURES_DURING_LOCK = 5;
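        // Repeat the lock test with an increasing number of captures taken while AE is locked.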
1931         for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) {
1932             autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i);
1933         }
1934     }
1935 
1936     /**
1937      * Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
1938      * the first capture result after the AE lock. The right AE lock behavior is:
1939      * When it is locked, it locks to the current exposure value, and all subsequent
1940      * requests with lock ON will have the same exposure value locked.
1941      */
1942     private void autoAeMultipleCapturesThenTestLock(
1943             CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)
1944             throws Exception {
1945         if (numCapturesDuringLock < 1) {
1946             throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
1947         }
1948         if (VERBOSE) {
1949             Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
1950                     + aeMode + " with " + numCapturesDuringLock + " captures before lock");
1951         }
1952 
1953         final int NUM_CAPTURES_BEFORE_LOCK = 2;
1954         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
1955 
1956         CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
1957         boolean canSetAeLock = mStaticInfo.isAeLockSupported();
1958 
1959         // Reset the AE lock to OFF, since we are reusing this builder many times
1960         if (canSetAeLock) {
1961             requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
1962         }
1963 
1964         // Just send several captures with auto AE, lock off.
1965         CaptureRequest request = requestBuilder.build();
1966         for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
1967             mSession.capture(request, listener, mHandler);
1968         }
1969         waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
1970 
1971         if (!canSetAeLock) {
1972             // Without AE lock, the remaining test items won't work
1973             return;
1974         }
1975 
1976         // Then fire several captures to lock the AE.
1977         requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1978 
1979         int requestCount = captureRequestsSynchronized(
1980                 requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
1981 
1982         int[] sensitivities = new int[numCapturesDuringLock];
1983         long[] expTimes = new long[numCapturesDuringLock];
1984         Arrays.fill(sensitivities, -1);
1985         Arrays.fill(expTimes, -1L);
1986 
1987         // Get the AE lock on result and validate the exposure values.
1988         waitForNumResults(listener, requestCount - numCapturesDuringLock);
1989         for (int i = 0; i < resultsDuringLock.length; i++) {
1990             resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1991         }
1992 
1993         for (int i = 0; i < numCapturesDuringLock; i++) {
1994             mCollector.expectKeyValueEquals(
1995                     resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
1996         }
1997 
1998         // Can't read manual sensor/exposure settings without manual sensor
1999         if (mStaticInfo.isCapabilitySupported(
2000                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
2001             int sensitivityLocked =
2002                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
2003             long expTimeLocked =
2004                     getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
2005             for (int i = 1; i < resultsDuringLock.length; i++) {
2006                 mCollector.expectKeyValueEquals(
2007                         resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
2008                 mCollector.expectKeyValueEquals(
2009                         resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
2010             }
2011         }
2012     }
2013 
2014     /**
2015      * Iterate through exposure times and sensitivities for manual AE control.
2016      * <p>
2017      * Use single requests rather than a repeating request to test manual exposure
2018      * value change per frame control.
2019      * </p>
2020      */
2021     private void aeManualControlTest()
2022             throws Exception {
2023         CaptureRequest.Builder requestBuilder =
2024                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2025         configurePreviewOutput(requestBuilder);
2026 
2027         // Warm up pipeline for more accurate timing
2028         SimpleCaptureCallback warmupListener =  new SimpleCaptureCallback();
2029         mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler);
2030         warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2031 
2032         // Do manual captures
2033         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
2034         SimpleCaptureCallback listener =  new SimpleCaptureCallback();
2035 
2036         long[] expTimesNs = getExposureTimeTestValues();
2037         int[] sensitivities = getSensitivityTestValues();
2038         // Submit single request at a time, then verify the result.
2039         for (int i = 0; i < expTimesNs.length; i++) {
2040             for (int j = 0; j < sensitivities.length; j++) {
2041                 if (VERBOSE) {
2042                     Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity "
2043                             + sensitivities[j] + ", exposure time " + expTimesNs[i] + "ns");
2044                 }
2045 
2046                 changeExposure(requestBuilder, expTimesNs[i], sensitivities[j]);
2047                 mSession.capture(requestBuilder.build(), listener, mHandler);
2048 
2049                 // make sure timeout is long enough for long exposure time - add a 2x safety margin
2050                 // to exposure time
2051                 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * expTimesNs[i] / 1000000;
2052                 CaptureResult result = listener.getCaptureResult(timeoutMs);
2053                 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2054                 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
2055                 validateExposureTime(expTimesNs[i], resultExpTimeNs);
2056                 validateSensitivity(sensitivities[j], resultSensitivity);
2057                 validateFrameDurationForCapture(result);
2058             }
2059         }
2060         mSession.stopRepeating();
2061 
2062         // TODO: Add another case to test where we can submit all requests, then wait for
2063         // results, which will hide the pipeline latency. This is not only faster, but also
2064         // tests high speed per frame control and synchronization.
2065     }
2066 
2067 
2068     /**
2069      * Verify black level lock control.
2070      */
2071     private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified,
2072             int maxLockOffCnt) throws Exception {
2073         int noLockCnt = 0;
2074         for (int i = 0; i < numFramesVerified; i++) {
2075             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2076             Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK);
2077             assertNotNull("Black level lock result shouldn't be null", blackLevelLock);
2078 
2079             // Count the lock == false results, which should occur at most maxLockOffCnt times.
2080             if (blackLevelLock == false) {
2081                 noLockCnt++;
2082             }
2083 
2084             if (VERBOSE) {
2085                 Log.v(TAG, "Black level lock result: " + blackLevelLock);
2086             }
2087         }
2088         assertTrue("Black level lock OFF occurs " + noLockCnt + " times, expect at most "
2089                 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt);
2090     }
2091 
2092     /**
2093      * Verify shading map for different shading modes.
2094      */
2095     private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
2096             int shadingMode) throws Exception {
2097 
2098         for (int i = 0; i < numFramesVerified; i++) {
2099             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2100             mCollector.expectEquals("Shading mode result doesn't match request",
2101                     shadingMode, result.get(CaptureResult.SHADING_MODE));
2102             LensShadingMap mapObj = result.get(
2103                     CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
2104             assertNotNull("Map object must not be null", mapObj);
2105             int numElementsInMap = mapObj.getGainFactorCount();
2106             float[] map = new float[numElementsInMap];
2107             mapObj.copyGainFactors(map, /*offset*/0);
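            // The map contains RGGB gain factors (4 per grid cell); its total size is bounded by
            // the MIN/MAX shading map sizes checked below.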
2108             assertNotNull("Map must not be null", map);
2109             assertFalse(String.format(
2110                     "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
2111                     numElementsInMap >= MAX_SHADING_MAP_SIZE);
2112             assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
2113                     MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);
2114 
2115             if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
2116                     shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
2117                 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
2118                 // elements >= 1.0f
2119 
2120                 int badValueCnt = 0;
2121                 // Detect the bad values of the map data.
2122                 for (int j = 0; j < numElementsInMap; j++) {
2123                     if (Float.isNaN(map[j]) || map[j] < 1.0f) {
2124                         badValueCnt++;
2125                     }
2126                 }
2127                 assertEquals("Number of bad values in the map is " + badValueCnt + " out of "
2128                         + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
2129             } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
2130                 float[] unityMap = new float[numElementsInMap];
2131                 Arrays.fill(unityMap, 1.0f);
2132                 // shading mode is OFF, expect to receive a unity map.
2133                 assertTrue("Result map " + Arrays.toString(map) + " must be a unity map",
2134                         Arrays.equals(unityMap, map));
2135             }
2136         }
2137     }
2138 
2139     /**
2140      * Test face detection for a camera.
2141      */
2142     private void faceDetectionTestByCamera() throws Exception {
2143         int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
2144 
2145         SimpleCaptureCallback listener;
2146         CaptureRequest.Builder requestBuilder =
2147                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2148 
2149         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2150         for (int mode : faceDetectModes) {
2151             requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
2152             if (VERBOSE) {
2153                 Log.v(TAG, "Start testing face detection mode " + mode);
2154             }
2155 
2156             // Create a new listener for each run to avoid results from one run spilling
2157             // into another run.
2158             listener = new SimpleCaptureCallback();
2159             startPreview(requestBuilder, maxPreviewSz, listener);
2160             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2161             verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
2162         }
2163 
2164         stopPreview();
2165     }
2166 
2167     /**
2168      * Verify face detection results for different face detection modes.
2169      *
2170      * @param listener The listener to get capture result
2171      * @param numFramesVerified Number of results to be verified
2172      * @param faceDetectionMode Face detection mode to be verified against
2173      */
2174     private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
2175             int faceDetectionMode) {
2176         for (int i = 0; i < numFramesVerified; i++) {
2177             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2178             mCollector.expectEquals("Result face detection mode should match the request",
2179                     faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
2180 
2181             Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
2182             List<Integer> faceIds = new ArrayList<Integer>(faces.length);
2183             List<Integer> faceScores = new ArrayList<Integer>(faces.length);
2184             if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
2185                 mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
2186                         0, faces.length);
2187             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
2188                 for (Face face : faces) {
2189                     mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
2190                     faceScores.add(face.getScore());
2191                     mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
2192                             face.getId() == Face.ID_UNSUPPORTED);
2193                 }
2194             } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
2195                 if (VERBOSE) {
2196                     Log.v(TAG, "Number of faces detected: " + faces.length);
2197                 }
2198 
2199                 for (Face face : faces) {
2200                     Rect faceBound;
2201                     boolean faceRectAvailable =  mCollector.expectTrue("Face rectangle "
2202                             + "shouldn't be null", face.getBounds() != null);
2203                     if (!faceRectAvailable) {
2204                         continue;
2205                     }
2206                     faceBound = face.getBounds();
2207 
2208                     faceScores.add(face.getScore());
2209                     faceIds.add(face.getId());
2210 
2211                     mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
2212                             face.getId() != Face.ID_UNSUPPORTED);
2213                     boolean leftEyeAvailable =
2214                             mCollector.expectTrue("Left eye position shouldn't be null",
2215                                     face.getLeftEyePosition() != null);
2216                     boolean rightEyeAvailable =
2217                             mCollector.expectTrue("Right eye position shouldn't be null",
2218                                     face.getRightEyePosition() != null);
2219                     boolean mouthAvailable =
2220                             mCollector.expectTrue("Mouth position shouldn't be null",
2221                             face.getMouthPosition() != null);
2222                     // Eyes/mouth position should be inside of the face rect.
2223                     if (leftEyeAvailable) {
2224                         Point leftEye = face.getLeftEyePosition();
2225                         mCollector.expectTrue("Left eye " + leftEye + " should be"
2226                                 + " inside of face rect " + faceBound,
2227                                 faceBound.contains(leftEye.x, leftEye.y));
2228                     }
2229                     if (rightEyeAvailable) {
2230                         Point rightEye = face.getRightEyePosition();
2231                         mCollector.expectTrue("Right eye " + rightEye + " should be"
2232                                 + " inside of face rect " + faceBound,
2233                                 faceBound.contains(rightEye.x, rightEye.y));
2234                     }
2235                     if (mouthAvailable) {
2236                         Point mouth = face.getMouthPosition();
2237                         mCollector.expectTrue("Mouth " + mouth +  " should be inside of"
2238                                 + " face rect " + faceBound,
2239                                 faceBound.contains(mouth.x, mouth.y));
2240                     }
2241                 }
2242             }
2243             mCollector.expectValuesInRange("Face scores are invalid", faceScores,
2244                     Face.SCORE_MIN, Face.SCORE_MAX);
2245             mCollector.expectValuesUnique("Face ids are invalid", faceIds);
2246         }
2247     }
2248 
2249     /**
2250      * Test tone map mode and result by camera
2251      */
2252     private void toneMapTestByCamera() throws Exception {
2253         if (!mStaticInfo.isManualToneMapSupported()) {
2254             return;
2255         }
2256 
2257         CaptureRequest.Builder requestBuilder =
2258                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2259         int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
2260         // Test the auto (FAST/HQ) modes first. Note that FAST and HQ must either both be present or both be absent.
2261         for (int i = 0; i < toneMapModes.length; i++) {
2262             if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) {
2263                 int tmpMode = toneMapModes[0];
2264                 toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST;
2265                 toneMapModes[i] = tmpMode;
2266             }
2267             if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) {
2268                 int tmpMode = toneMapModes[1];
2269                 toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY;
2270                 toneMapModes[i] = tmpMode;
2271             }
2272         }
2273         for (int mode : toneMapModes) {
2274             if (VERBOSE) {
2275                 Log.v(TAG, "Testing tonemap mode " + mode);
2276             }
2277 
2278             requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
2279             switch (mode) {
2280                 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
2281                     TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
2282                             TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
2283                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
2284                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2285 
2286                     toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
2287                             TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
2288                     requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
2289                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2290                     break;
2291                 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
2292                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
2293                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2294                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
2295                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2296                     requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
2297                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2298                     break;
2299                 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
2300                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
2301                             CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
2302                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2303                     requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
2304                             CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
2305                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2306                     break;
2307                 default:
2308                     testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2309                     break;
2310             }
2311         }
2312 
2314     }
2315 
2316     /**
2317      * Test tonemap mode with specified request settings
2318      *
2319      * @param numFramesVerified Number of results to be verified
2320      * @param requestBuilder the request builder of settings to be tested
2321      */
2322     private void testToneMapMode(int numFramesVerified,
2323             CaptureRequest.Builder requestBuilder) throws Exception {
2324         final int MIN_TONEMAP_CURVE_POINTS = 2;
2325         final Float ZERO = 0.0f;
2326         final Float ONE = 1.0f;
2327 
2328         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2329         int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
2330         Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2331         startPreview(requestBuilder, maxPreviewSz, listener);
2332         waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2333 
2334         int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
2335         for (int i = 0; i < numFramesVerified; i++) {
2336             CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2337             mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
2338                     result.get(CaptureResult.TONEMAP_MODE));
2339             TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
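            // Each channel's curve is copied out as a flattened list of (Pin, Pout) control
            // points, i.e. TonemapCurve.POINT_SIZE (= 2) floats per point.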
2340             int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
2341             float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
2342             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
2343             float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
2344             pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
2345             float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
2346             tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
2347             tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
2348             tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
2349             if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
2350                 /**
2351                  * TODO: need to figure out a good way to measure the difference
2352                  * between request and result, as they may have different array
2353                  * sizes.
2354                  */
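                // One possible approach (not implemented here): resample the result curve at
                // the request's Pin values via linear interpolation and compare the
                // interpolated Pout values within a tolerance.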
2355             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
2356                 mCollector.expectEquals("Capture result gamma value should match request",
2357                         requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
2358                         result.get(CaptureResult.TONEMAP_GAMMA));
2359             } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
2360                 mCollector.expectEquals("Capture result preset curve should match request",
2361                         requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
2362                         result.get(CaptureResult.TONEMAP_PRESET_CURVE));
2363             }
2364 
2365             // Tonemap curve result availability and basic validity check for all modes.
2366             mCollector.expectValuesInRange("Tonemap curve red values are out of range",
2367                     CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE);
2368             mCollector.expectInRange("Tonemap curve red length is out of range",
2369                     mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2370             mCollector.expectValuesInRange("Tonemap curve green values are out of range",
2371                     CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE);
2372             mCollector.expectInRange("Tonemap curve green length is out of range",
2373                     mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2374             mCollector.expectValuesInRange("Tonemap curve blue values are out of range",
2375                     CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE);
2376             mCollector.expectInRange("Tonemap curve blue length is out of range",
2377                     mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2);
2378 
2379             // Make sure capture result tonemap has identical channels.
2380             if (mStaticInfo.isMonochromeCamera()) {
2381                 mCollector.expectEquals("Capture result tonemap of monochrome camera should " +
2382                         "have same dimension for all channels", mapRed.length, mapGreen.length);
2383                 mCollector.expectEquals("Capture result tonemap of monochrome camera should " +
2384                         "have same dimension for all channels", mapRed.length, mapBlue.length);
2385 
2386                 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) {
2387                     boolean isIdentical = true;
2388                     for (int j = 0; j < mapRed.length; j++) {
2389                         isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]);
2390                         if (!isIdentical)
2391                             break;
2392                     }
2393                     mCollector.expectTrue("Capture result tonemap of monochrome camera should " +
2394                             "be identical between all channels", isIdentical);
2395                 }
2396             }
2397         }
2398         stopPreview();
2399     }
2400 
2401     /**
2402      * Test awb mode control.
2403      * <p>
2404      * Test each supported AWB mode, verify the AWB mode in capture result
2405      * matches request. When AWB is locked, the color correction gains and
2406      * transform should remain unchanged.
2407      * </p>
2408      */
2409     private void awbModeAndLockTestByCamera() throws Exception {
2410         int[] awbModes = mStaticInfo.getAwbAvailableModesChecked();
2411         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2412         boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
2413         CaptureRequest.Builder requestBuilder =
2414                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2415         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2416 
2417         for (int mode : awbModes) {
2418             SimpleCaptureCallback listener;
2419             requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode);
2420             listener = new SimpleCaptureCallback();
2421             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2422             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2423 
2424             // Verify AWB mode in capture result.
2425             verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener,
2426                     NUM_FRAMES_VERIFIED);
2427 
2428             if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) {
2429                 // Verify color correction transform and gains stay unchanged after a lock.
2430                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
2431                 listener = new SimpleCaptureCallback();
2432                 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2433                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2434 
2435                 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) {
2436                     waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE,
2437                             CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT);
2438                 }
2439 
2440             }
2441             // Don't verify auto mode result if AWB lock is not supported
2442             if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) {
2443                 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED);
2444             }
2445         }
2446     }
2447 
2448     private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener,
2449             int numFramesVerified) {
2450         // Skip check if cc gains/transform/mode are not available
2451         if (!mStaticInfo.areKeysAvailable(
2452                 CaptureResult.COLOR_CORRECTION_GAINS,
2453                 CaptureResult.COLOR_CORRECTION_TRANSFORM,
2454                 CaptureResult.COLOR_CORRECTION_MODE)) {
2455             return;
2456         }
2457 
2458         CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2459         RggbChannelVector lockedGains =
2460                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
2461         ColorSpaceTransform lockedTransform =
2462                 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
2463 
2464         for (int i = 0; i < numFramesVerified; i++) {
2465             result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2466             // Color correction mode check is skipped here, as it is checked in colorCorrectionTest.
2467             validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE));
2468 
2469             RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS);
2470             ColorSpaceTransform transform =
2471                     getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM);
2472             mCollector.expectEquals("Color correction gains should remain unchanged after awb lock",
2473                     lockedGains, gains);
2474             mCollector.expectEquals("Color correction transform should remain unchanged after"
2475                     + " awb lock", lockedTransform, transform);
2476         }
2477     }
2478 
2479     /**
2480      * Test AF mode control.
2481      * <p>
2482      * Test all supported AF modes, verify the AF mode in capture result matches
2483      * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode,
2484      * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED
2485      * state within certain amount of frames.
2486      * </p>
2487      */
2488     private void afModeTestByCamera() throws Exception {
2489         int[] afModes = mStaticInfo.getAfAvailableModesChecked();
2490         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2491         CaptureRequest.Builder requestBuilder =
2492                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2493         startPreview(requestBuilder, maxPreviewSize, /*listener*/null);
2494 
2495         for (int mode : afModes) {
2496             SimpleCaptureCallback listener;
2497             requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode);
2498             listener = new SimpleCaptureCallback();
2499             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2500             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2501 
2502             // Verify AF mode in capture result.
2503             verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener,
2504                     NUM_FRAMES_VERIFIED);
2505 
2506             // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes.
2507             // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily
2508             // result in a passive AF call if the camera has already been focused, and the scene has
2509             // not changed enough to trigger an AF pass.  Skip this constraint for LEGACY.
2510             if (mStaticInfo.isHardwareLevelAtLeastLimited() &&
2511                     (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE ||
2512                     mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
2513                 List<Integer> afStateList = new ArrayList<Integer>();
2514                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED);
2515                 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED);
2516                 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList,
2517                         NUM_RESULTS_WAIT_TIMEOUT);
2518             }
2519         }
2520     }
2521 
2522     /**
2523      * Test video and optical stabilizations if they are supported by a given camera.
2524      */
2525     private void stabilizationTestByCamera() throws Exception {
2526         // video stabilization test.
2527         List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys();
2528 
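        // Query the supported stabilization modes only if the corresponding characteristics
        // keys are advertised; otherwise test against an empty mode list.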
2529         Integer[] videoStabModes = (keys.contains(CameraCharacteristics.
2530                 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ?
2531                 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) :
2532                     new Integer[0];
2533         int[] opticalStabModes = (keys.contains(
2534                 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ?
2535                 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0];
2536 
2537         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2538         CaptureRequest.Builder requestBuilder =
2539                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2540         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2541         startPreview(requestBuilder, maxPreviewSize, listener);
2542 
2543         for (Integer mode : videoStabModes) {
2544             listener = new SimpleCaptureCallback();
2545             requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode);
2546             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2547             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2548             // The video stabilization result may be any of the supported modes.
2549             verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE,
2550                     videoStabModes, listener, NUM_FRAMES_VERIFIED);
2551         }
2552 
2553         for (int mode : opticalStabModes) {
2554             listener = new SimpleCaptureCallback();
2555             requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
2556             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2557             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2558             verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode,
2559                     listener, NUM_FRAMES_VERIFIED);
2560         }
2561 
2562         stopPreview();
2563     }
2564 
2565     private void digitalZoomTestByCamera(Size previewSize) throws Exception {
2566         final int ZOOM_STEPS = 15;
2567         final PointF[] TEST_ZOOM_CENTERS;
2568         final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked();
2569         final float ZOOM_ERROR_MARGIN = 0.01f;
2570         if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) {
2571             // It doesn't make much sense to test the zoom if the device effectively supports
2572             // no zoom.
2573             return;
2574         }
2575 
2576         final int croppingType = mStaticInfo.getScalerCroppingTypeChecked();
2577         if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) {
2578             // Set the four corner zoom centers so that the minimum zoom factor needed there is 2x.
2579             float normalizedLeft = 0.25f;
2580             float normalizedTop = 0.25f;
2581             float normalizedRight = 0.75f;
2582             float normalizedBottom = 0.75f;
2583             // If the max supported zoom is too small, make sure the max zoom is at least
2584             // tested at the four corners.
2585             if (maxZoom < 2.0f) {
2586                 normalizedLeft = 0.5f / maxZoom;
2587                 normalizedTop = 0.5f / maxZoom;
2588                 normalizedRight = 1.0f - normalizedLeft;
2589                 normalizedBottom = 1.0f - normalizedTop;
2590             }
2591             TEST_ZOOM_CENTERS = new PointF[] {
2592                 new PointF(0.5f, 0.5f),   // Center point
2593                 new PointF(normalizedLeft, normalizedTop),     // top left corner zoom
2594                 new PointF(normalizedRight, normalizedTop),    // top right corner zoom
2595                 new PointF(normalizedLeft, normalizedBottom),  // bottom left corner zoom
2596                 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom
2597             };
2598 
2599             if (VERBOSE) {
2600                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM");
2601             }
2602         } else {
2603             // CENTER_ONLY
2604             TEST_ZOOM_CENTERS = new PointF[] {
2605                     new PointF(0.5f, 0.5f),   // Center point
2606             };
2607 
2608             if (VERBOSE) {
2609                 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY");
2610             }
2611         }
2612 
2613         final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
2614         final Rect defaultCropRegion = new Rect(0, 0,
2615                 activeArraySize.width(), activeArraySize.height());
2616         Rect[] cropRegions = new Rect[ZOOM_STEPS];
2617         MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][];
2618         CaptureRequest.Builder requestBuilder =
2619                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2620         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2621 
2622         updatePreviewSurface(previewSize);
2623         configurePreviewOutput(requestBuilder);
2624 
2625         CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS];
2626 
2627         // Set algorithm regions
2628         final int METERING_RECT_RATIO = 10;
2629         final MeteringRectangle[][] defaultMeteringRects = new MeteringRectangle[][] {
2630                 {
2631                     new MeteringRectangle (
2632                         /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(),
2633                         /*meteringWeight*/1), /* full active region */
2634                 },
2635                 {
2636                     new MeteringRectangle (
2637                         /*x*/0, /*y*/0, activeArraySize.width()/METERING_RECT_RATIO,
2638                         activeArraySize.height()/METERING_RECT_RATIO,
2639                         /*meteringWeight*/1),
2640                 },
2641                 {
2642                     new MeteringRectangle (
2643                         /*x*/(int)(activeArraySize.width() * (0.5f - 0.5f/METERING_RECT_RATIO)),
2644                         /*y*/(int)(activeArraySize.height() * (0.5f - 0.5f/METERING_RECT_RATIO)),
2645                         activeArraySize.width()/METERING_RECT_RATIO,
2646                         activeArraySize.height()/METERING_RECT_RATIO,
2647                         /*meteringWeight*/1),
2648                 },
2649         };
2650 
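        // Submit each crop region (sync max latency + 1) times so that the last result in the
        // burst should reflect the new settings; when the latency is unknown, fall back to
        // NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1 submissions.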
2651         final int CAPTURE_SUBMIT_REPEAT;
2652         {
2653             int maxLatency = mStaticInfo.getSyncMaxLatency();
2654             if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
2655                 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1;
2656             } else {
2657                 CAPTURE_SUBMIT_REPEAT = maxLatency + 1;
2658             }
2659         }
2660 
2661         if (VERBOSE) {
2662             Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT);
2663         }
2664 
2665         for (MeteringRectangle[] meteringRect : defaultMeteringRects) {
2666             for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2667                 update3aRegion(requestBuilder, algo,  meteringRect);
2668             }
2669 
2670             for (PointF center : TEST_ZOOM_CENTERS) {
2671                 Rect previousCrop = null;
2672 
2673                 for (int i = 0; i < ZOOM_STEPS; i++) {
2674                     /*
2675                      * Submit capture request
2676                      */
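                    // Step the zoom factor evenly from 1x toward maxZoom. For a factor z,
                    // getCropRegionForZoom() returns a crop roughly 1/z of the active array in
                    // each dimension, anchored at the requested center and clamped to stay
                    // inside the array.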
2677                     float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS);
2678                     cropRegions[i] = getCropRegionForZoom(zoomFactor, center,
2679                             maxZoom, defaultCropRegion);
2680                     if (VERBOSE) {
2681                         Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " +
2682                                 center + " The cropRegion is " + cropRegions[i] +
2683                                 " Preview size is " + previewSize);
2684                     }
2685                     requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]);
2686                     requests[i] = requestBuilder.build();
2687                     for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) {
2688                         if (VERBOSE) {
2689                             Log.v(TAG, "submit crop region " + cropRegions[i]);
2690                         }
2691                         mSession.capture(requests[i], listener, mHandler);
2692                     }
2693 
2694                     /*
2695                      * Validate capture result
2696                      */
2697                     waitForNumResults(listener, CAPTURE_SUBMIT_REPEAT - 1); // Drop first few frames
2698                     TotalCaptureResult result = listener.getTotalCaptureResultForRequest(
2699                             requests[i], NUM_RESULTS_WAIT_TIMEOUT);
2700                     List<CaptureResult> partialResults = result.getPartialResults();
2701 
2702                     Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION);
2703                     for (CaptureResult partialResult : partialResults) {
2704                         Rect cropRegionInPartial =
2705                                 partialResult.get(CaptureResult.SCALER_CROP_REGION);
2706                         if (cropRegionInPartial != null) {
2707                             mCollector.expectEquals("SCALER_CROP_REGION in partial result must "
2708                                     + "match in final result", cropRegionInPartial, cropRegion);
2709                         }
2710                     }
2711 
2712                     /*
2713                      * Validate resulting crop regions
2714                      */
2715                     if (previousCrop != null) {
2716                         Rect currentCrop = cropRegion;
2717                         mCollector.expectTrue(String.format(
2718                                 "Crop region should shrink or stay the same " +
2719                                         "(previous = %s, current = %s)",
2720                                         previousCrop, currentCrop),
2721                                 previousCrop.equals(currentCrop) ||
2722                                     (previousCrop.width() > currentCrop.width() &&
2723                                      previousCrop.height() > currentCrop.height()));
2724                     }
2725 
2726                     if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
2727                         mCollector.expectRectsAreSimilar(
2728                                 "Request and result crop region should be similar",
2729                                 cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA);
2730                     }
2731 
2732                     if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) {
2733                         mCollector.expectRectCentered(
2734                                 "Result crop region should be centered inside the active array",
2735                                 new Size(activeArraySize.width(), activeArraySize.height()),
2736                                 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED);
2737                     }
2738 
2739                     /*
2740                      * Validate resulting metering regions
2741                      */
2742 
2743                     // Use the actual reported crop region to calculate the resulting metering region
2744                     expectRegions[i] = getExpectedOutputRegion(
2745                             /*requestRegion*/meteringRect,
2746                             /*cropRect*/     cropRegion);
2747 
2748                     // Verify Output 3A region is intersection of input 3A region and crop region
2749                     for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2750                         validate3aRegion(result, partialResults, algo, expectRegions[i],
2751                                 false/*scaleByZoomRatio*/);
2752                     }
2753 
2754                     previousCrop = cropRegion;
2755                 }
2756 
2757                 if (maxZoom > 1.0f) {
2758                     mCollector.expectTrue(
2759                             String.format("Most zoomed-in crop region should be smaller " +
2760                                             "than active array w/h " +
2761                                             "(last crop = %s, active array = %s)",
2762                                             previousCrop, activeArraySize),
2763                                 (previousCrop.width() < activeArraySize.width() &&
2764                                  previousCrop.height() < activeArraySize.height()));
2765                 }
2766             }
2767         }
2768     }
2769 
2770     private void zoomRatioTestByCamera(Size previewSize) throws Exception {
2771         final int ZOOM_STEPS = 15;
2772         final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked();
2773         // The error margin is derived from a VGA-size camera zoomed all the way to 10x, in which
2774         // case the cropping error can be as large as 480/46 - 480/48 = 0.435.
2775         final float ZOOM_ERROR_MARGIN = 0.05f;
2776 
2777         final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked();
2778         final Rect defaultCropRegion =
2779                 new Rect(0, 0, activeArraySize.width(), activeArraySize.height());
2780         final Rect zoom2xCropRegion =
2781                 new Rect(activeArraySize.width()/4, activeArraySize.height()/4,
2782                         activeArraySize.width()*3/4, activeArraySize.height()*3/4);
2783         MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][];
2784         CaptureRequest.Builder requestBuilder =
2785                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2786         requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion);
2787         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2788 
2789         updatePreviewSurface(previewSize);
2790         configurePreviewOutput(requestBuilder);
2791 
2792         // Set algorithm regions to full active region
2793         final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] {
2794                 new MeteringRectangle (
2795                         /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(),
2796                         /*meteringWeight*/1)
2797         };
2798 
2799         for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2800             update3aRegion(requestBuilder, algo,  defaultMeteringRect);
2801         }
2802 
2803         final int captureSubmitRepeat;
2804         {
2805             int maxLatency = mStaticInfo.getSyncMaxLatency();
2806             if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
2807                 captureSubmitRepeat = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1;
2808             } else {
2809                 captureSubmitRepeat = maxLatency + 1;
2810             }
2811         }
2812 
2813         float previousRatio = zoomRatioRange.getLower();
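        // Step the zoom ratio evenly across the supported range; the last step stops just
        // short of the range's upper bound.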
2814         for (int i = 0; i < ZOOM_STEPS; i++) {
2815             /*
2816              * Submit capture request
2817              */
2818             float zoomFactor = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() -
2819                     zoomRatioRange.getLower()) * i / ZOOM_STEPS;
2820             if (VERBOSE) {
2821                 Log.v(TAG, "Testing Zoom ratio " + zoomFactor + " Preview size is " + previewSize);
2822             }
2823             requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor);
2824             requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion);
2825             CaptureRequest request = requestBuilder.build();
2826             for (int j = 0; j < captureSubmitRepeat; ++j) {
2827                 mSession.capture(request, listener, mHandler);
2828             }
2829 
2830             /*
2831              * Validate capture result
2832              */
2833             waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames
2834             TotalCaptureResult result = listener.getTotalCaptureResultForRequest(
2835                     request, NUM_RESULTS_WAIT_TIMEOUT);
2836             List<CaptureResult> partialResults = result.getPartialResults();
2837             float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO);
2838             Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION);
2839 
2840             for (CaptureResult partialResult : partialResults) {
2841                 Rect cropRegionInPartial =
2842                         partialResult.get(CaptureResult.SCALER_CROP_REGION);
2843                 if (cropRegionInPartial != null) {
2844                     mCollector.expectEquals("SCALER_CROP_REGION in partial result must "
2845                             + "match in final result", cropRegionInPartial, cropRegion);
2846                 }
2847 
2848                 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
2849                 if (zoomRatioInPartial != null) {
2850                     mCollector.expectEquals("CONTROL_ZOOM_RATIO in partial result must match"
2851                             + " that in final result", resultZoomRatio, zoomRatioInPartial);
2852                 }
2853             }
2854 
2855             /*
2856              * Validate resulting crop regions and zoom ratio
2857              */
2858             mCollector.expectTrue(String.format(
2859                     "Zoom ratio should increase or stay the same " +
2860                             "(previous = %f, current = %f)",
2861                             previousRatio, resultZoomRatio),
2862                     Math.abs(previousRatio - resultZoomRatio) < ZOOM_ERROR_MARGIN ||
2863                         (previousRatio < resultZoomRatio));
2864 
2865             mCollector.expectTrue(String.format(
2866                     "Request and result zoom ratio should be similar " +
2867                     "(requested = %f, result = %f)", zoomFactor, resultZoomRatio),
2868                     Math.abs(zoomFactor - resultZoomRatio)/zoomFactor <= ZOOM_ERROR_MARGIN);
2869 
2870             // If the zoom ratio is converted to a crop region in the HAL, the error can be
2871             // magnified when converting to the post-zoom crop region, so scale the error
2872             // threshold for the crop region check.
2873             float errorMultiplier = Math.max(1.0f, zoomFactor);
2874             if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
2875                 mCollector.expectRectsAreSimilar(
2876                         "Request and result crop region should be similar",
2877                         defaultCropRegion, cropRegion,
2878                         CROP_REGION_ERROR_PERCENT_DELTA * errorMultiplier);
2879             }
2880 
2881             mCollector.expectRectCentered(
2882                     "Result crop region should be centered inside the active array",
2883                     new Size(activeArraySize.width(), activeArraySize.height()),
2884                     cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED * errorMultiplier);
2885 
2886             /*
2887              * Validate resulting metering regions
2888              */
2889             // Use the actual reported crop region to calculate the resulting metering region
2890             expectRegions[i] = getExpectedOutputRegion(
2891                     /*requestRegion*/defaultMeteringRect,
2892                     /*cropRect*/     cropRegion);
2893 
2894             // Verify Output 3A region is intersection of input 3A region and crop region
2895             boolean scaleByZoomRatio = zoomFactor > 1.0f;
2896             for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
2897                 validate3aRegion(result, partialResults, algo, expectRegions[i], scaleByZoomRatio);
2898             }
2899 
2900             previousRatio = resultZoomRatio;
2901 
2902             /*
2903              * Set windowboxing cropRegion while zoomRatio is not 1.0x, and make sure the crop
2904              * region was overwritten.
2905              */
2906             if (zoomFactor != 1.0f) {
2907                 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom2xCropRegion);
2908                 CaptureRequest requestWithCrop = requestBuilder.build();
2909                 for (int j = 0; j < captureSubmitRepeat; ++j) {
2910                     mSession.capture(requestWithCrop, listener, mHandler);
2911                 }
2912 
2913                 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames
2914                 CaptureResult resultWithCrop = listener.getCaptureResultForRequest(
2915                         requestWithCrop, NUM_RESULTS_WAIT_TIMEOUT);
2916                 float resultZoomRatioWithCrop = getValueNotNull(resultWithCrop,
2917                         CaptureResult.CONTROL_ZOOM_RATIO);
2918                 Rect cropRegionWithCrop = getValueNotNull(resultWithCrop,
2919                         CaptureResult.SCALER_CROP_REGION);
2920 
2921                 mCollector.expectTrue(String.format(
2922                         "Result zoom ratio should remain the same (activeArrayCrop: %f, " +
2923                         "zoomedCrop: %f)", resultZoomRatio, resultZoomRatioWithCrop),
2924                         Math.abs(resultZoomRatio - resultZoomRatioWithCrop) < ZOOM_ERROR_MARGIN);
2925 
2926                 if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
2927                     mCollector.expectRectsAreSimilar(
2928                             "Result crop region should remain the same with or without crop",
2929                             cropRegion, cropRegionWithCrop, CROP_REGION_ERROR_PERCENT_DELTA);
2930                 }
2931             }
2932         }
2933     }
2934 
2935     private void digitalZoomPreviewCombinationTestByCamera() throws Exception {
2936         final double ASPECT_RATIO_THRESHOLD = 0.001;
2937         List<Double> aspectRatiosTested = new ArrayList<Double>();
2938         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2939         aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight());
2940 
2941         for (Size size : mOrderedPreviewSizes) {
2942             // Max preview size was already tested in the testDigitalZoom test. Skip it.
2943             if (size.equals(maxPreviewSize)) {
2944                 continue;
2945             }
2946 
2947             // Only test the largest size for each aspect ratio.
2948             double aspectRatio = (double)(size.getWidth()) / size.getHeight();
2949             if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) {
2950                 continue;
2951             }
2952 
2953             if (VERBOSE) {
2954                 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom");
2955             }
2956 
2957             aspectRatiosTested.add(aspectRatio);
2958             digitalZoomTestByCamera(size);
2959         }
2960     }
2961 
2962     private static boolean isAspectRatioContained(List<Double> aspectRatioList,
2963             double aspectRatio, double delta) {
2964         for (Double ratio : aspectRatioList) {
2965             if (Math.abs(ratio - aspectRatio) < delta) {
2966                 return true;
2967             }
2968         }
2969 
2970         return false;
2971     }
2972 
2973     private void sceneModeTestByCamera() throws Exception {
2974         int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked();
2975         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2976         CaptureRequest.Builder requestBuilder =
2977                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2978         SimpleCaptureCallback listener = new SimpleCaptureCallback();
2979         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
2980         startPreview(requestBuilder, maxPreviewSize, listener);
2981 
2982         for (int mode : sceneModes) {
2983             requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode);
2984             listener = new SimpleCaptureCallback();
2985             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
2986             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2987 
2988             verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE,
2989                     mode, listener, NUM_FRAMES_VERIFIED);
2990             // This also serves the purpose of showing the preview for NUM_FRAMES_VERIFIED frames.
2991             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
2992                     CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED);
2993         }
2994     }
2995 
2996     private void effectModeTestByCamera() throws Exception {
2997         int[] effectModes = mStaticInfo.getAvailableEffectModesChecked();
2998         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
2999         CaptureRequest.Builder requestBuilder =
3000                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
3001         requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
3002         SimpleCaptureCallback listener = new SimpleCaptureCallback();
3003         startPreview(requestBuilder, maxPreviewSize, listener);
3004 
3005         for (int mode : effectModes) {
3006             requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode);
3007             listener = new SimpleCaptureCallback();
3008             mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
3009             waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3010 
3011             verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE,
3012                     mode, listener, NUM_FRAMES_VERIFIED);
3013             // This also serves the purpose of showing the preview for NUM_FRAMES_VERIFIED frames.
3014             verifyCaptureResultForKey(CaptureResult.CONTROL_MODE,
3015                     CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED);
3016         }
3017     }
3018 
3019     private void extendedSceneModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
3020         Capability[] extendedSceneModeCaps = mStaticInfo.getAvailableExtendedSceneModeCapsChecked();
3021         if (extendedSceneModeCaps.length == 0) {
3022             return;
3023         }
3024 
3025         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
3026         CaptureRequest.Builder requestBuilder =
3027                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
3028 
3029         for (Capability cap : extendedSceneModeCaps) {
3030             int mode = cap.getMode();
3031             requestBuilder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE, mode);
3032 
3033             // Test that the DISABLED and BOKEH_CONTINUOUS modes don't slow down the frame rate.
3034             if (mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_DISABLED ||
3035                     mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) {
3036                 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
3037             }
3038 
3039             Range<Float> zoomRange = cap.getZoomRatioRange();
3040             float[] zoomRatios = new float[]{zoomRange.getLower(), zoomRange.getUpper()};
3041             for (float ratio : zoomRatios) {
3042                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
3043                 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, ratio);
3044                 startPreview(requestBuilder, maxPreviewSize, listener);
3045                 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3046 
3047                 verifyCaptureResultForKey(CaptureResult.CONTROL_EXTENDED_SCENE_MODE,
3048                         mode, listener, NUM_FRAMES_VERIFIED);
3049                 verifyCaptureResultForKey(CaptureResult.CONTROL_ZOOM_RATIO,
3050                         ratio, listener, NUM_FRAMES_VERIFIED);
3051             }
3052         }
3053     }
3054 
3055     //----------------------------------------------------------------
3056     //---------Below are common functions for all tests.--------------
3057     //----------------------------------------------------------------
3058 
3059     /**
3060      * Enable manual exposure control, change the exposure time and sensitivity, and
3061      * clamp the values into the supported ranges.
3062      */
3063     private void changeExposure(CaptureRequest.Builder requestBuilder,
3064             long expTime, int sensitivity) {
3065         // Check if the max analog sensitivity is available and no larger than max sensitivity.  The
3066         // max analog sensitivity is not actually used here. This is only an extra correctness
3067         // check.
3068         mStaticInfo.getMaxAnalogSensitivityChecked();
3069 
3070         expTime = mStaticInfo.getExposureClampToRange(expTime);
3071         sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity);
3072 
3073         requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
3074         requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime);
3075         requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
3076     }

3077     /**
3078      * Enable manual exposure control, change the exposure time, and
3079      * clamp the value into the supported range.
3080      *
3081      * <p>The sensitivity is set to the default value.</p>
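     *
     * <p>For example, {@code changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS)} switches the
     * request to manual AE using this class's default exposure time and sensitivity.</p>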
3082      */
3083     private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) {
3084         changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY);
3085     }
3086 
3087     /**
3088      * Get the exposure time array that contains multiple exposure time steps in
3089      * the exposure time range, in nanoseconds.
3090      */
3091     private long[] getExposureTimeTestValues() {
3092         long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1];
3093         long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS);
3094         long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS);
3095 
3096         long range = maxExpTime - minExpTime;
3097         double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS;
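        // Walk from the maximum exposure time down toward the minimum in equal steps,
        // clamping each value in case rounding pushes it outside the supported range.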
3098         for (int i = 0; i < testValues.length; i++) {
3099             testValues[i] = maxExpTime - (long)(stepSize * i);
3100             testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]);
3101         }
3102 
3103         return testValues;
3104     }
3105 
3106     /**
3107      * Generate test focus distances in range of [0, minFocusDistance] in increasing order.
3108      *
3109      * @param repeatMin number of times minValue will be repeated.
3110      * @param repeatMax number of times maxValue will be repeated.
3111      */
3112     private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) {
3113         int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax;
3114         float[] testValues = new float[totalCount];
3115         float minValue = 0;
3116         float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
3117 
3118         float range = maxValue - minValue;
3119         float stepSize = range / NUM_TEST_FOCUS_DISTANCES;
3120 
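        // The values start at 0 (infinity focus), step up to the minimum focus distance
        // (the closest supported focus, in diopters), and repeat the two endpoints as
        // requested by repeatMin/repeatMax.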
3121         for (int i = 0; i < repeatMin; i++) {
3122             testValues[i] = minValue;
3123         }
3124         for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) {
3125             testValues[repeatMin+i] = minValue + stepSize * i;
3126         }
3127         for (int i = 0; i < repeatMax; i++) {
3128             testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] =
3129                     maxValue;
3130         }
3131 
3132         return testValues;
3133     }
3134 
3135     /**
3136      * Get the sensitivity array that contains multiple sensitivity steps in the
3137      * sensitivity range.
3138      * <p>
3139      * The number of sensitivity test values is determined by
3140      * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and the sensitivity range, and is
3141      * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
3142      * </p>
3143      */
3144     private int[] getSensitivityTestValues() {
3145         int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
3146                 DEFAULT_SENSITIVITY);
3147         int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
3148                 DEFAULT_SENSITIVITY);
3149 
3150         int range = maxSensitivity - minSensitivity;
3151         int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
3152         int numSteps = range / stepSize;
3153         // Bound the number of test steps to avoid an overly long test.
3154         if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
3155             numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
3156             stepSize = range / numSteps;
3157         }
3158         int[] testValues = new int[numSteps + 1];
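        // Walk from the maximum sensitivity down in stepSize decrements, clamping each
        // value to the supported range.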
3159         for (int i = 0; i < testValues.length; i++) {
3160             testValues[i] = maxSensitivity - stepSize * i;
3161             testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
3162         }
3163 
3164         return testValues;
3165     }
3166 
3167     /**
3168      * Validate the AE manual control exposure time.
3169      *
3170      * <p>The result exposure time should be close to the request, and only rounded down when they are not equal.</p>
3171      *
3172      * @param request Request exposure time
3173      * @param result Result exposure time
3174      */
3175     private void validateExposureTime(long request, long result) {
3176         long expTimeDelta = request - result;
3177         long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
3178                 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
3179         // First, the result should round down rather than up; second, it should be close enough.
3180         mCollector.expectTrue("Exposure time is invalid for AE manual control test, request: "
3181                 + request + " result: " + result,
3182                 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
3183     }
3184 
3185     /**
3186      * Validate AE manual control sensitivity.
3187      *
3188      * @param request Request sensitivity
3189      * @param result Result sensitivity
3190      */
3191     private void validateSensitivity(int request, int result) {
        float sensitivityDelta = request - result;
        float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
        // The result must not exceed the request (round down, not up) and must be within
        // the error margin.
        mCollector.expectTrue("Sensitivity is invalid for AE manual control test, request: "
                + request + " result: " + result,
                sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
    }

    /**
     * Validate frame duration for a given capture.
     *
     * <p>Frame duration should be at least as long as the exposure time.</p>
     *
     * @param result The capture result for a given capture
     */
    private void validateFrameDurationForCapture(CaptureResult result) {
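        // For example, a capture reporting a 30ms (30,000,000 ns) exposure time must also
        // report a frame duration of at least 30,000,000 ns.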
        long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
        long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
        if (VERBOSE) {
            Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
        }

        mCollector.expectTrue(String.format("Frame duration (%d) should be at least as long"
                + " as the exposure time (%d) for a given capture", frameDuration, expTime),
                frameDuration >= expTime);

        validatePipelineDepth(result);
    }

    /**
     * Basic verification for the control mode capture result.
     *
     * @param key The capture result key to be verified against
     * @param requestMode The request mode for this result
     * @param listener The capture listener to get capture results
     * @param numFramesVerified The number of capture results to be verified
     */
    private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode,
            SimpleCaptureCallback listener, int numFramesVerified) {
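        // Illustrative usage (hypothetical call): verify that a requested effect mode is
        // reported back unchanged in every verified result, e.g.
        //   verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE,
        //           CaptureRequest.CONTROL_EFFECT_MODE_SEPIA, listener, NUM_FRAMES_VERIFIED);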
        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            validatePipelineDepth(result);
            T resultMode = getValueNotNull(result, key);
            if (VERBOSE) {
                Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: "
                        + resultMode.toString());
            }
            mCollector.expectEquals("Key " + key.getName() + " result should match request",
                    requestMode, resultMode);
        }
    }

    /**
     * Basic verification that the value of a capture result key should be one of the expected
     * values.
     *
     * @param key The capture result key to be verified against
     * @param expectedModes The list of acceptable modes for this result
     * @param listener The capture listener to get capture results
     * @param numFramesVerified The number of capture results to be verified
     */
    private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes,
            SimpleCaptureCallback listener, int numFramesVerified) {
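        // Illustrative usage (hypothetical call): useful when more than one result value is
        // acceptable, e.g. an AE mode that the device may report with or without auto-flash:
        //   verifyAnyCaptureResultForKey(CaptureResult.CONTROL_AE_MODE,
        //           new Integer[] { CaptureRequest.CONTROL_AE_MODE_ON,
        //                   CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH },
        //           listener, NUM_FRAMES_VERIFIED);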
        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            validatePipelineDepth(result);
            T resultMode = getValueNotNull(result, key);
            if (VERBOSE) {
                Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: "
                        + resultMode.toString());
            }
            // Capture result should be one of the expected values.
            mCollector.expectContains(expectedModes, resultMode);
        }
    }

    /**
     * Verify that the fps does not slow down for a given input request with certain
     * controls set.
     * <p>
     * This method selects the max preview size for each fps range, configures the
     * preview stream, starts the preview at that size, and then verifies that the
     * result frame durations fall within the expected frame duration range.
     * </p>
     *
     * @param requestBuilder The request builder that contains post-processing
     *            controls that could impact the output frame rate, such as
     *            {@link CaptureRequest#NOISE_REDUCTION_MODE}. These controls
     *            must be set to values that do not slow down the frame rate.
     * @param numFramesVerified The number of frames to be verified
     * @param fpsRanges The fps ranges to be verified
     */
    private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
            int numFramesVerified, List<Range<Integer>> fpsRanges) throws Exception {
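        // Illustrative expectation: for a [30, 30] fps range the nominal frame duration is
        // 1e9 / 30 ~= 33.3ms, so with a hypothetical 1% margin every verified frame duration
        // (or timestamp delta) must fall roughly within [33.0ms, 33.7ms]; for a [15, 30]
        // range the accepted window is roughly [33.3ms * 0.99, 66.7ms * 1.01].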
        boolean frameDurationAvailable = true;
        // Allow a few frames for AE to settle on target FPS range
        final int NUM_FRAME_TO_SKIP = 6;
        float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
        if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
            frameDurationAvailable = false;
            // Allow a larger error margin (1.5%) for timestamps
            frameDurationErrorMargin = 0.015f;
        }
        if (mStaticInfo.isExternalCamera()) {
            // Allow an even larger error margin (15%) for external camera timestamps
            frameDurationErrorMargin = 0.15f;
        }

        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        SimpleCaptureCallback resultListener;

        for (int i = 0; i < fpsRanges.size(); i += 1) {
            fpsRange = fpsRanges.get(i);
            Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            // If unable to find a preview size, then log the failure, and skip this run.
            if (previewSz == null) {
                if (mStaticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    mCollector.addMessage(String.format(
                            "Unable to find a preview size supporting given fps range %s",
                            fpsRange));
                }
                continue;
            }

            if (VERBOSE) {
                Log.v(TAG, String.format("Test fps range %s for preview size %s",
                        fpsRange, previewSz.toString()));
            }
            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode; the test continues. The device
                // must still make sure that the antibanding algorithm doesn't slow down the fps.
                Log.i(TAG, "OFF antibanding mode is not supported; the camera device output must" +
                        " not slow down the frame rate regardless of its current antibanding" +
                        " mode");
            }

            resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, previewSz, resultListener);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
            // Wait several more frames for AE to settle on target FPS range
            waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);

            long[] frameDurationRange = new long[]{
                    (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            long captureTime = 0, prevCaptureTime = 0;
            for (int j = 0; j < numFramesVerified; j++) {
                long frameDuration = frameDurationRange[0];
                CaptureResult result =
                        resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
                validatePipelineDepth(result);
                if (frameDurationAvailable) {
                    frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
                } else {
                    // if frame duration is not available, check timestamp instead
                    captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
                    if (j > 0) {
                        frameDuration = captureTime - prevCaptureTime;
                    }
                    prevCaptureTime = captureTime;
                }
                mCollector.expectInRange(
                        "Frame duration must be in the range of " +
                                Arrays.toString(frameDurationRange),
                        frameDuration,
                        (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
                        (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
            }
        }

        stopPreview();
    }

    /**
     * Validate the pipeline depth result.
     *
     * @param result The capture result to get pipeline depth data
     */
    private void validatePipelineDepth(CaptureResult result) {
        final byte MIN_PIPELINE_DEPTH = 1;
        byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
        Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
        mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
                MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
                maxPipelineDepth);
    }

    /**
     * Calculate the anti-flickering corrected exposure time.
     * <p>
     * If the input exposure time is very short (shorter than the flickering
     * boundary), which indicates a bright scene that is very likely outdoors, skip
     * the correction, as it would not be meaningful.
     * </p>
     * <p>
     * For a long exposure time (larger than the flickering boundary), find the
     * corrected exposure time that is the closest multiple of the flickering
     * boundary.
     * </p>
     *
     * @param flickeringMode The flickering mode
     * @param exposureTime The input exposure time to be corrected
     * @return anti-flickering corrected exposure time
     */
    private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
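        // Worked example (assuming a hypothetical 10ms boundary for 50Hz): a 24ms input
        // becomes 24 + 5 = 29ms and is truncated to the nearest lower multiple of the
        // boundary, i.e. 20ms; a 26ms input becomes 31ms and snaps to 30ms. In effect the
        // exposure time is rounded to the nearest multiple of the flickering boundary.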
        if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
            throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
        }
        long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
        if (flickeringMode == ANTI_FLICKERING_60HZ) {
            flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
        }

        if (exposureTime <= flickeringBoundary) {
            return exposureTime;
        }

        // Find the closest anti-flickering corrected exposure time
        long correctedExpTime = exposureTime + (flickeringBoundary / 2);
        correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
        return correctedExpTime;
    }

    /**
     * Update one 3A region in the capture request builder if that region is supported. Do
     * nothing if the specified 3A region is not supported by the camera device.
     * @param requestBuilder The request to be updated
     * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
     * @param regions The 3A regions to be set
     */
    private void update3aRegion(
            CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions)
    {
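        // Illustrative usage (hypothetical call; width/height are placeholders): request a
        // single full-weight AF region, which is silently skipped on devices that report
        // zero supported AF regions:
        //   update3aRegion(requestBuilder, INDEX_ALGORITHM_AF, new MeteringRectangle[] {
        //           new MeteringRectangle(0, 0, width, height,
        //                   MeteringRectangle.METERING_WEIGHT_MAX) });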
        int maxRegions;
        CaptureRequest.Key<MeteringRectangle[]> key;

        if (regions == null || regions.length == 0) {
            throw new IllegalArgumentException("Invalid input 3A region!");
        }

        switch (algoIdx) {
            case INDEX_ALGORITHM_AE:
                maxRegions = mStaticInfo.getAeMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AE_REGIONS;
                break;
            case INDEX_ALGORITHM_AWB:
                maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AWB_REGIONS;
                break;
            case INDEX_ALGORITHM_AF:
                maxRegions = mStaticInfo.getAfMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AF_REGIONS;
                break;
            default:
                throw new IllegalArgumentException("Unknown 3A Algorithm!");
        }

        if (maxRegions >= regions.length) {
            requestBuilder.set(key, regions);
        }
    }

    /**
     * Validate that one 3A region in the capture result equals the expected region if that
     * region is supported. Do nothing if the specified 3A region is not supported by the
     * camera device.
     * @param result The capture result to be validated
     * @param partialResults The partial results to be validated
     * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
     * @param expectRegions The 3A regions expected in capture result
     * @param scaleByZoomRatio whether to scale the error threshold by zoom ratio
     */
    private void validate3aRegion(
            CaptureResult result, List<CaptureResult> partialResults, int algoIdx,
            MeteringRectangle[] expectRegions, boolean scaleByZoomRatio)
    {
        // There are multiple cases where result 3A region could be slightly different than the
        // request:
        // 1. Distortion correction,
        // 2. Adding a smaller 3A region in the test exposes that some existing devices'
        //    offset is larger than 1.
        // 3. Precision loss due to converting to HAL zoom ratio and back
        // 4. Error magnification due to active array scale-up when zoom ratio API is used.
        //
        // To handle all these scenarios, make the threshold larger, and scale the threshold based
        // on zoom ratio. The scaling factor should be relatively tight, and shouldn't be smaller
        // than 1x.
        final int maxCoordOffset = 5;
        int maxRegions;
        CaptureResult.Key<MeteringRectangle[]> key;
        MeteringRectangle[] actualRegion;

        switch (algoIdx) {
            case INDEX_ALGORITHM_AE:
                maxRegions = mStaticInfo.getAeMaxRegionsChecked();
                key = CaptureResult.CONTROL_AE_REGIONS;
                break;
            case INDEX_ALGORITHM_AWB:
                maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
                key = CaptureResult.CONTROL_AWB_REGIONS;
                break;
            case INDEX_ALGORITHM_AF:
                maxRegions = mStaticInfo.getAfMaxRegionsChecked();
                key = CaptureResult.CONTROL_AF_REGIONS;
                break;
            default:
                throw new IllegalArgumentException("Unknown 3A Algorithm!");
        }

        int maxDist = maxCoordOffset;
        if (scaleByZoomRatio) {
            Float zoomRatio = result.get(CaptureResult.CONTROL_ZOOM_RATIO);
            for (CaptureResult partialResult : partialResults) {
                Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
                if (zoomRatioInPartial != null) {
                    mCollector.expectEquals("CONTROL_ZOOM_RATIO in partial result must match"
                            + " that in final result", zoomRatio, zoomRatioInPartial);
                }
            }
            maxDist = (int)Math.ceil(maxDist * Math.max(zoomRatio / 2, 1.0f));
        }
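        // Illustrative example: with maxCoordOffset == 5 and a result zoom ratio of 4.0, the
        // allowed per-edge offset becomes ceil(5 * max(4.0 / 2, 1.0)) = 10 pixels; zoom
        // ratios of 2.0 or below leave the threshold at the base 5 pixels.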

        if (maxRegions > 0)
        {
            actualRegion = getValueNotNull(result, key);
            for (CaptureResult partialResult : partialResults) {
                MeteringRectangle[] actualRegionInPartial = partialResult.get(key);
                if (actualRegionInPartial != null) {
                    mCollector.expectEquals("Key " + key.getName() + " in partial result must match"
                            + " that in final result", actualRegionInPartial, actualRegion);
                }
            }

            for (int i = 0; i < actualRegion.length; i++) {
                // If the expected region's metering weight is 0, allow the camera device
                // to override it.
                if (expectRegions[i].getMeteringWeight() == 0) {
                    continue;
                }

                Rect a = actualRegion[i].getRect();
                Rect e = expectRegions[i].getRect();

                if (VERBOSE) {
                    Log.v(TAG, "Actual region " + actualRegion[i].toString() +
                            ", expected region " + expectRegions[i].toString() +
                            ", maxDist " + maxDist);
                }
                if (!mCollector.expectLessOrEqual(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist, Math.abs(a.left - e.left))) continue;
                if (!mCollector.expectLessOrEqual(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist, Math.abs(a.right - e.right))) continue;
                if (!mCollector.expectLessOrEqual(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist, Math.abs(a.top - e.top))) continue;
                if (!mCollector.expectLessOrEqual(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist, Math.abs(a.bottom - e.bottom))) continue;
            }
        }
    }
}