1 /* 2 * Copyright 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.cts; 18 19 import static android.hardware.camera2.CameraCharacteristics.*; 20 import static android.hardware.camera2.cts.CameraTestUtils.*; 21 22 import android.graphics.Point; 23 import android.graphics.PointF; 24 import android.graphics.Rect; 25 import android.graphics.SurfaceTexture; 26 import android.hardware.camera2.CameraCharacteristics; 27 import android.hardware.camera2.CameraDevice; 28 import android.hardware.camera2.CameraMetadata; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.TotalCaptureResult; 32 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback; 33 import android.hardware.camera2.cts.helpers.StaticMetadata; 34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase; 35 import android.hardware.camera2.params.BlackLevelPattern; 36 import android.hardware.camera2.params.Capability; 37 import android.hardware.camera2.params.ColorSpaceTransform; 38 import android.hardware.camera2.params.Face; 39 import android.hardware.camera2.params.LensShadingMap; 40 import android.hardware.camera2.params.MeteringRectangle; 41 import android.hardware.camera2.params.RggbChannelVector; 42 import android.hardware.camera2.params.TonemapCurve; 43 import android.hardware.cts.helpers.CameraUtils; 44 import android.media.Image; 45 import android.os.Build; 46 import android.os.Parcel; 47 import android.platform.test.annotations.AppModeFull; 48 import android.platform.test.annotations.RequiresFlagsEnabled; 49 import android.util.ArraySet; 50 import android.util.Log; 51 import android.util.Pair; 52 import android.util.Range; 53 import android.util.Rational; 54 import android.util.Size; 55 import android.view.Surface; 56 57 import com.android.compatibility.common.util.PropertyUtil; 58 import com.android.internal.camera.flags.Flags; 59 60 import org.junit.Test; 61 import org.junit.runner.RunWith; 62 import org.junit.runners.Parameterized; 63 64 import java.nio.ByteBuffer; 65 import java.util.ArrayList; 66 import java.util.Arrays; 67 import java.util.List; 68 69 /** 70 * <p> 71 * Basic test for camera CaptureRequest key controls. 72 * </p> 73 * <p> 74 * Several test categories are covered: manual sensor control, 3A control, 75 * manual ISP control and other per-frame control and synchronization. 76 * </p> 77 */ 78 79 @RunWith(Parameterized.class) 80 public class CaptureRequestTest extends Camera2SurfaceViewTestCase { 81 private static final String TAG = "CaptureRequestTest"; 82 private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE); 83 private static final int NUM_FRAMES_VERIFIED = 15; 84 private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60; 85 /** 30ms exposure time must be supported by full capability devices. 
*/ 86 private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms 87 private static final int DEFAULT_SENSITIVITY = 100; 88 private static final int RGGB_COLOR_CHANNEL_COUNT = 4; 89 private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT; 90 private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT; 91 private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L; 92 private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms 93 private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation. 94 private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation. 95 private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation. 96 private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.06f; // 6%, Approximation. 97 private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3; 98 private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8; 99 private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100; 100 private static final int NUM_RESULTS_WAIT_TIMEOUT = 100; 101 private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8; 102 private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100; 103 private static final int NUM_PARTIAL_FRAMES_PFC = 2; 104 private static final int NUM_PARTIAL_FRAMES_NPFC = 6; 105 106 private static final int NUM_TEST_FOCUS_DISTANCES = 10; 107 private static final int NUM_FOCUS_DISTANCES_REPEAT = 3; 108 // 5 percent error margin for calibrated device 109 private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f; 110 // 25 percent error margin for uncalibrated device 111 private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f; 112 // 10 percent error margin for approximate device 113 private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f; 114 // 1 percent boundary margin for focus range verify 115 private static final float FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT = 0.01f; 116 private static final int ANTI_FLICKERING_50HZ = 1; 117 private static final int ANTI_FLICKERING_60HZ = 2; 118 // 5 percent error margin for resulting crop regions 119 private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f; 120 private static final float ZOOM_RATIO_ERROR_PERCENT_DELTA = 0.05f; 121 122 // 1 percent error margin for centering the crop region 123 private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f; 124 private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f; 125 private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f; 126 127 // Linear tone mapping curve example. 128 private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f}; 129 // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points. 
130 private static final float[] TONEMAP_CURVE_SRGB = { 131 0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f, 132 0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f, 133 0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f, 134 0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f 135 }; 136 private final Rational ZERO_R = new Rational(0, 1); 137 private final Rational ONE_R = new Rational(1, 1); 138 139 private static final int ZOOM_STEPS = 15; 140 141 private enum TorchSeqState { 142 RAMPING_UP, 143 FIRED, 144 RAMPING_DOWN 145 } 146 147 @Override setUp()148 public void setUp() throws Exception { 149 super.setUp(); 150 } 151 152 @Override tearDown()153 public void tearDown() throws Exception { 154 super.tearDown(); 155 } 156 157 /** 158 * Test CaptureRequest settings parcelling. 159 */ 160 @Test testSettingsBinderParcel()161 public void testSettingsBinderParcel() throws Exception { 162 SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5); 163 Surface surface = new Surface(outputTexture); 164 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 165 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 166 try { 167 openDevice(cameraIdsUnderTest[i]); 168 CaptureRequest.Builder requestBuilder = 169 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 170 requestBuilder.addTarget(surface); 171 172 // Check regular/default case 173 CaptureRequest captureRequestOriginal = requestBuilder.build(); 174 Parcel p; 175 p = Parcel.obtain(); 176 captureRequestOriginal.writeToParcel(p, 0); 177 p.setDataPosition(0); 178 CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 179 assertEquals("Parcelled camera settings should match", 180 captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT), 181 new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW)); 182 p.recycle(); 183 184 // Check capture request with additional physical camera settings 185 String physicalId = new String(Integer.toString(i + 1)); 186 ArraySet<String> physicalIds = new ArraySet<String> (); 187 physicalIds.add(physicalId); 188 189 requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW, 190 physicalIds); 191 requestBuilder.addTarget(surface); 192 captureRequestOriginal = requestBuilder.build(); 193 p = Parcel.obtain(); 194 captureRequestOriginal.writeToParcel(p, 0); 195 p.setDataPosition(0); 196 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 197 assertEquals("Parcelled camera settings should match", 198 captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT), 199 new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW)); 200 p.recycle(); 201 202 // Check consistency between parcel write and read by stacking 2 203 // CaptureRequest objects when writing and reading. 
204 p = Parcel.obtain(); 205 captureRequestOriginal.writeToParcel(p, 0); 206 captureRequestOriginal.writeToParcel(p, 0); 207 p.setDataPosition(0); 208 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 209 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 210 p.recycle(); 211 212 // Check various invalid cases 213 p = Parcel.obtain(); 214 p.writeInt(-1); 215 p.setDataPosition(0); 216 try { 217 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 218 fail("should get RuntimeException due to invalid number of settings"); 219 } catch (RuntimeException e) { 220 // Expected 221 } 222 p.recycle(); 223 224 p = Parcel.obtain(); 225 p.writeInt(0); 226 p.setDataPosition(0); 227 try { 228 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 229 fail("should get RuntimeException due to invalid number of settings"); 230 } catch (RuntimeException e) { 231 // Expected 232 } 233 p.recycle(); 234 235 p = Parcel.obtain(); 236 p.writeInt(1); 237 p.setDataPosition(0); 238 try { 239 captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p); 240 fail("should get RuntimeException due to absent settings"); 241 } catch (RuntimeException e) { 242 // Expected 243 } 244 p.recycle(); 245 } finally { 246 closeDevice(); 247 } 248 } 249 } 250 251 /** 252 * Test black level lock when exposure value change. 253 * <p> 254 * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the 255 * camera device should lock the black level. When the exposure values are changed, 256 * the camera may require reset black level Since changes to certain capture 257 * parameters (such as exposure time) may require resetting of black level 258 * compensation. However, the black level must remain locked after exposure 259 * value changes (when requests have lock ON). 260 * </p> 261 */ 262 @Test testBlackLevelLock()263 public void testBlackLevelLock() throws Exception { 264 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 265 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 266 try { 267 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isCapabilitySupported( 268 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 269 continue; 270 } 271 272 openDevice(cameraIdsUnderTest[i]); 273 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 274 CaptureRequest.Builder requestBuilder = 275 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 276 277 // Start with default manual exposure time, with black level being locked. 278 requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true); 279 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY); 280 281 Size previewSz = 282 getMaxPreviewSize(mCamera.getId(), mCameraManager, 283 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 284 285 startPreview(requestBuilder, previewSz, listener); 286 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 287 // No lock OFF state is allowed as the exposure is not changed. 288 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0); 289 290 // Double the exposure time and gain, with black level still being locked. 291 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2); 292 listener = new SimpleCaptureCallback(); 293 startPreview(requestBuilder, previewSz, listener); 294 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 295 // Allow at most one lock OFF state as the exposure is changed once. 
296 verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1); 297 298 stopPreview(); 299 } finally { 300 closeDevice(); 301 } 302 } 303 } 304 305 /** 306 * Test dynamic black/white levels if they are supported. 307 * 308 * <p> 309 * If the dynamic black and white levels are reported, test below: 310 * 1. the dynamic black and white levels shouldn't deviate from the global value too much 311 * for different sensitivities. 312 * 2. If the RAW_SENSOR and optical black regions are supported, capture RAW images and 313 * calculate the optical black level values. The reported dynamic black level should be 314 * close enough to the optical black level values. 315 * </p> 316 */ 317 @Test testDynamicBlackWhiteLevel()318 public void testDynamicBlackWhiteLevel() throws Exception { 319 for (String id : getCameraIdsUnderTest()) { 320 try { 321 if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) { 322 continue; 323 } 324 openDevice(id); 325 dynamicBlackWhiteLevelTestByCamera(); 326 } finally { 327 closeDevice(); 328 } 329 } 330 } 331 332 /** 333 * Basic lens shading map request test. 334 * <p> 335 * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will 336 * be applied by the camera device, and an identity lens shading map data 337 * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON. 338 * </p> 339 * <p> 340 * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction 341 * will be applied by the camera device. The lens shading map data can be 342 * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON. 343 * </p> 344 */ 345 @Test testLensShadingMap()346 public void testLensShadingMap() throws Exception { 347 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 348 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 349 try { 350 StaticMetadata staticInfo = mAllStaticInfo.get(cameraIdsUnderTest[i]); 351 if (!staticInfo.isManualLensShadingMapSupported()) { 352 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 353 " doesn't support lens shading controls, skipping test"); 354 continue; 355 } 356 357 List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject( 358 staticInfo.getAvailableLensShadingMapModesChecked())); 359 360 if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) { 361 continue; 362 } 363 364 openDevice(cameraIdsUnderTest[i]); 365 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 366 CaptureRequest.Builder requestBuilder = 367 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 368 requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 369 STATISTICS_LENS_SHADING_MAP_MODE_ON); 370 371 Size previewSz = 372 getMaxPreviewSize(mCamera.getId(), mCameraManager, 373 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 374 List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject( 375 mStaticInfo.getAvailableLensShadingModesChecked())); 376 377 // Shading map mode OFF, lensShadingMapMode ON, camera device 378 // should output unity maps. 
379 if (lensShadingModes.contains(SHADING_MODE_OFF)) { 380 requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF); 381 listener = new SimpleCaptureCallback(); 382 startPreview(requestBuilder, previewSz, listener); 383 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 384 verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF); 385 } 386 387 // Shading map mode FAST, lensShadingMapMode ON, camera device 388 // should output valid maps. 389 if (lensShadingModes.contains(SHADING_MODE_FAST)) { 390 requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST); 391 392 listener = new SimpleCaptureCallback(); 393 startPreview(requestBuilder, previewSz, listener); 394 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 395 // Allow at most one lock OFF state as the exposure is changed once. 396 verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST); 397 } 398 399 // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device 400 // should output valid maps. 401 if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) { 402 requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY); 403 404 listener = new SimpleCaptureCallback(); 405 startPreview(requestBuilder, previewSz, listener); 406 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 407 verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY); 408 } 409 410 stopPreview(); 411 } finally { 412 closeDevice(); 413 } 414 } 415 } 416 417 /** 418 * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control. 419 * <p> 420 * Test all available anti-banding modes, check if the exposure time adjustment is 421 * correct. 422 * </p> 423 */ 424 @Test testAntiBandingModes()425 public void testAntiBandingModes() throws Exception { 426 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 427 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 428 try { 429 // Without manual sensor control, exposure time cannot be verified 430 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isCapabilitySupported( 431 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 432 continue; 433 } 434 435 openDevice(cameraIdsUnderTest[i]); 436 int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked(); 437 438 Size previewSz = 439 getMaxPreviewSize(mCamera.getId(), mCameraManager, 440 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 441 442 for (int mode : modes) { 443 antiBandingTestByMode(previewSz, mode); 444 } 445 } finally { 446 closeDevice(); 447 } 448 } 449 450 } 451 452 /** 453 * Test AE mode and lock. 454 * 455 * <p> 456 * For AE lock, when it is locked, exposure parameters shouldn't be changed. 457 * For AE modes, each mode should satisfy the per frame controls defined in 458 * API specifications. 459 * </p> 460 */ 461 @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests testAeModeAndLock()462 public void testAeModeAndLock() throws Exception { 463 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 464 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 465 try { 466 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 467 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 468 " does not support color outputs, skipping"); 469 continue; 470 } 471 472 openDevice(cameraIdsUnderTest[i]); 473 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 474 475 // Update preview surface with given size for all sub-tests. 
476 updatePreviewSurface(maxPreviewSz); 477 478 // Test aeMode and lock 479 int[] aeModes = mStaticInfo.getAeAvailableModesChecked(); 480 for (int mode : aeModes) { 481 aeModeAndLockTestByMode(mode); 482 } 483 } finally { 484 closeDevice(); 485 } 486 } 487 } 488 489 /** Test {@link CaptureRequest#FLASH_MODE} control. 490 * <p> 491 * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control 492 * and {@link CaptureResult#FLASH_STATE} result. 493 * </p> 494 */ 495 @Test testFlashControl()496 public void testFlashControl() throws Exception { 497 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 498 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 499 try { 500 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 501 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 502 " does not support color outputs, skipping"); 503 continue; 504 } 505 506 openDevice(cameraIdsUnderTest[i]); 507 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 508 CaptureRequest.Builder requestBuilder = 509 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 510 511 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 512 513 startPreview(requestBuilder, maxPreviewSz, listener); 514 515 // Flash control can only be used when the AE mode is ON or OFF. 516 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON); 517 518 // LEGACY won't support AE mode OFF 519 boolean aeOffModeSupported = false; 520 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) { 521 if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 522 aeOffModeSupported = true; 523 } 524 } 525 if (aeOffModeSupported) { 526 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF); 527 } 528 529 stopPreview(); 530 } finally { 531 closeDevice(); 532 } 533 } 534 } 535 536 /** 537 * Test that the flash can be successfully turned off given various initial and final 538 * AE_CONTROL modes for repeating CaptureRequests. 539 */ 540 @Test testFlashTurnOff()541 public void testFlashTurnOff() throws Exception { 542 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 543 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 544 try { 545 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 546 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 547 " does not support color outputs, skipping"); 548 continue; 549 } 550 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).hasFlash()) { 551 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 552 " does not support flash, skipping"); 553 continue; 554 } 555 openDevice(cameraIdsUnderTest[i]); 556 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 557 CaptureRequest.Builder requestBuilder = 558 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 559 560 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 
561 562 startPreview(requestBuilder, maxPreviewSz, listener); 563 boolean isLegacy = CameraUtils.isLegacyHAL(mCameraManager, cameraIdsUnderTest[i]); 564 flashTurnOffTest(listener, isLegacy, 565 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 566 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH); 567 568 flashTurnOffTest(listener, isLegacy, 569 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 570 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); 571 572 flashTurnOffTest(listener, isLegacy, 573 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 574 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE); 575 576 stopPreview(); 577 } finally { 578 closeDevice(); 579 } 580 } 581 582 } 583 584 /** 585 * Test face detection modes and results. 586 */ 587 @Test testFaceDetection()588 public void testFaceDetection() throws Exception { 589 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 590 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 591 try { 592 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 593 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 594 " does not support color outputs, skipping"); 595 continue; 596 } 597 openDevice(cameraIdsUnderTest[i]); 598 faceDetectionTestByCamera(); 599 } finally { 600 closeDevice(); 601 } 602 } 603 } 604 605 /** 606 * Test tone map modes and controls. 607 */ 608 @Test testToneMapControl()609 public void testToneMapControl() throws Exception { 610 for (String id : getCameraIdsUnderTest()) { 611 try { 612 if (!mAllStaticInfo.get(id).isManualToneMapSupported()) { 613 Log.i(TAG, "Camera " + id + 614 " doesn't support tone mapping controls, skipping test"); 615 continue; 616 } 617 openDevice(id); 618 toneMapTestByCamera(); 619 } finally { 620 closeDevice(); 621 } 622 } 623 } 624 625 /** 626 * Test color correction modes and controls. 627 */ 628 @Test testColorCorrectionControl()629 public void testColorCorrectionControl() throws Exception { 630 for (String id : getCameraIdsUnderTest()) { 631 try { 632 if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) { 633 Log.i(TAG, "Camera " + id + 634 " doesn't support color correction controls, skipping test"); 635 continue; 636 } 637 openDevice(id); 638 colorCorrectionTestByCamera(); 639 } finally { 640 closeDevice(); 641 } 642 } 643 } 644 645 /** 646 * Test edge mode control for Fps not exceeding 30. 647 */ 648 @Test testEdgeModeControl()649 public void testEdgeModeControl() throws Exception { 650 for (String id : getCameraIdsUnderTest()) { 651 try { 652 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 653 Log.i(TAG, "Camera " + id + 654 " doesn't support EDGE_MODE controls, skipping test"); 655 continue; 656 } 657 658 openDevice(id); 659 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 660 edgeModesTestByCamera(fpsRanges); 661 } finally { 662 closeDevice(); 663 } 664 } 665 } 666 667 /** 668 * Test edge mode control for Fps greater than 30. 
669 */ 670 @Test testEdgeModeControlFastFps()671 public void testEdgeModeControlFastFps() throws Exception { 672 for (String id : getCameraIdsUnderTest()) { 673 try { 674 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 675 Log.i(TAG, "Camera " + id + 676 " doesn't support EDGE_MODE controls, skipping test"); 677 continue; 678 } 679 680 openDevice(id); 681 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 682 edgeModesTestByCamera(fpsRanges); 683 } finally { 684 closeDevice(); 685 } 686 } 687 688 } 689 690 /** 691 * Test focus distance control. 692 */ 693 @Test 694 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testFocusDistanceControl()695 public void testFocusDistanceControl() throws Exception { 696 for (String id : getCameraIdsUnderTest()) { 697 try { 698 StaticMetadata staticInfo = mAllStaticInfo.get(id); 699 if (!staticInfo.hasFocuser()) { 700 Log.i(TAG, "Camera " + id + " has no focuser, skipping test"); 701 continue; 702 } 703 704 if (!staticInfo.isCapabilitySupported( 705 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 706 Log.i(TAG, "Camera " + id + 707 " does not support MANUAL_SENSOR, skipping test"); 708 continue; 709 } 710 711 openDevice(id); 712 focusDistanceTestByCamera(); 713 } finally { 714 closeDevice(); 715 } 716 } 717 } 718 719 /** 720 * Test noise reduction mode for fps ranges not exceeding 30 721 */ 722 @Test testNoiseReductionModeControl()723 public void testNoiseReductionModeControl() throws Exception { 724 for (String id : getCameraIdsUnderTest()) { 725 try { 726 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 727 Log.i(TAG, "Camera " + id + 728 " doesn't support noise reduction mode, skipping test"); 729 continue; 730 } 731 732 openDevice(id); 733 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 734 noiseReductionModeTestByCamera(fpsRanges); 735 } finally { 736 closeDevice(); 737 } 738 } 739 } 740 741 /** 742 * Test noise reduction mode for fps ranges greater than 30 743 */ 744 @Test testNoiseReductionModeControlFastFps()745 public void testNoiseReductionModeControlFastFps() throws Exception { 746 for (String id : getCameraIdsUnderTest()) { 747 try { 748 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 749 Log.i(TAG, "Camera " + id + 750 " doesn't support noise reduction mode, skipping test"); 751 continue; 752 } 753 754 openDevice(id); 755 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 756 noiseReductionModeTestByCamera(fpsRanges); 757 } finally { 758 closeDevice(); 759 } 760 } 761 } 762 763 /** 764 * Test AWB lock control. 765 * 766 * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p> 767 */ 768 @Test testAwbModeAndLock()769 public void testAwbModeAndLock() throws Exception { 770 for (String id : getCameraIdsUnderTest()) { 771 try { 772 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 773 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 774 continue; 775 } 776 openDevice(id); 777 awbModeAndLockTestByCamera(); 778 } finally { 779 closeDevice(); 780 } 781 } 782 } 783 784 /** 785 * Test different AF modes. 
786 */ 787 @Test testAfModes()788 public void testAfModes() throws Exception { 789 for (String id : getCameraIdsUnderTest()) { 790 try { 791 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 792 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 793 continue; 794 } 795 openDevice(id); 796 afModeTestByCamera(); 797 } finally { 798 closeDevice(); 799 } 800 } 801 } 802 803 /** 804 * Test video and optical stabilizations. 805 */ 806 @Test testCameraStabilizations()807 public void testCameraStabilizations() throws Exception { 808 for (String id : getCameraIdsUnderTest()) { 809 try { 810 StaticMetadata staticInfo = mAllStaticInfo.get(id); 811 List<Key<?>> keys = staticInfo.getCharacteristics().getKeys(); 812 if (!(keys.contains( 813 CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) || 814 keys.contains( 815 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) { 816 Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes"); 817 continue; 818 } 819 if (!staticInfo.isColorOutputSupported()) { 820 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 821 continue; 822 } 823 openDevice(id); 824 stabilizationTestByCamera(); 825 } finally { 826 closeDevice(); 827 } 828 } 829 } 830 831 /** 832 * Test digitalZoom (center wise and non-center wise), validate the returned crop regions. 833 * The max preview size is used for each camera. 834 */ 835 @Test testDigitalZoom()836 public void testDigitalZoom() throws Exception { 837 for (String id : getCameraIdsUnderTest()) { 838 try { 839 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 840 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 841 continue; 842 } 843 openDevice(id); 844 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 845 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/false); 846 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/true); 847 } finally { 848 closeDevice(); 849 } 850 } 851 } 852 853 /** 854 * Test zoom using CONTROL_ZOOM_RATIO, validate the returned crop regions and zoom ratio. 855 * The max preview size is used for each camera. 856 */ 857 @Test testZoomRatio()858 public void testZoomRatio() throws Exception { 859 for (String id : getCameraIdsUnderTest()) { 860 try { 861 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 862 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 863 continue; 864 } 865 openDevice(id); 866 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 867 zoomRatioTestByCamera(maxPreviewSize); 868 } finally { 869 closeDevice(); 870 } 871 } 872 } 873 874 /** 875 * Test that zoom doesn't incur non-monotonic timestamp sequence 876 * 877 * Camera API requires that camera timestamps monotonically increase. 878 */ 879 @Test 880 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testZoomTimestampIncrease()881 public void testZoomTimestampIncrease() throws Exception { 882 if (PropertyUtil.getFirstApiLevel() <= Build.VERSION_CODES.UPSIDE_DOWN_CAKE) { 883 // Only run test for first API level V or higher 884 return; 885 } 886 887 for (String id : getCameraIdsUnderTest()) { 888 try { 889 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 890 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 891 continue; 892 } 893 openDevice(id); 894 zoomTimestampIncreaseTestByCamera(); 895 } finally { 896 closeDevice(); 897 } 898 } 899 } 900 901 /** 902 * Test digital zoom and all preview size combinations. 
903 * TODO: this and above test should all be moved to preview test class. 904 */ 905 @Test testDigitalZoomPreviewCombinations()906 public void testDigitalZoomPreviewCombinations() throws Exception { 907 for (String id : getCameraIdsUnderTest()) { 908 try { 909 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 910 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 911 continue; 912 } 913 openDevice(id); 914 digitalZoomPreviewCombinationTestByCamera(); 915 } finally { 916 closeDevice(); 917 } 918 } 919 } 920 921 /** 922 * Test scene mode controls. 923 */ 924 @Test testSceneModes()925 public void testSceneModes() throws Exception { 926 for (String id : getCameraIdsUnderTest()) { 927 try { 928 if (mAllStaticInfo.get(id).isSceneModeSupported()) { 929 openDevice(id); 930 sceneModeTestByCamera(); 931 } 932 } finally { 933 closeDevice(); 934 } 935 } 936 } 937 938 /** 939 * Test effect mode controls. 940 */ 941 @Test testEffectModes()942 public void testEffectModes() throws Exception { 943 for (String id : getCameraIdsUnderTest()) { 944 try { 945 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 946 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 947 continue; 948 } 949 openDevice(id); 950 effectModeTestByCamera(); 951 } finally { 952 closeDevice(); 953 } 954 } 955 } 956 957 /** 958 * Test extended scene mode controls. 959 */ 960 @Test testExtendedSceneModes()961 public void testExtendedSceneModes() throws Exception { 962 for (String id : getCameraIdsUnderTest()) { 963 try { 964 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 965 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 966 continue; 967 } 968 openDevice(id); 969 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 970 extendedSceneModeTestByCamera(fpsRanges); 971 } finally { 972 closeDevice(); 973 } 974 } 975 } 976 977 /** 978 * Test basic auto-framing. 979 */ 980 @Test testAutoframing()981 public void testAutoframing() throws Exception { 982 for (String id : getCameraIdsUnderTest()) { 983 try { 984 if (!mAllStaticInfo.get(id).isAutoframingSupported()) { 985 Log.i(TAG, "Camera " + id + " does not support auto-framing, skipping"); 986 continue; 987 } 988 openDevice(id); 989 autoframingTestByCamera(); 990 } finally { 991 closeDevice(); 992 } 993 } 994 } 995 996 /** 997 * Test manual flash strength level control. 998 */ 999 @Test testManualFlashStrengthLevelControl()1000 public void testManualFlashStrengthLevelControl() throws Exception { 1001 for (String id : getCameraIdsUnderTest()) { 1002 try { 1003 if (!mAllStaticInfo.get(id).isManualFlashStrengthControlSupported()) { 1004 Log.i(TAG, "Camera " + id + " does not support manual flash " 1005 + "strength control, skipping"); 1006 continue; 1007 } 1008 openDevice(id); 1009 manualFlashStrengthControlTestByCamera(); 1010 } finally { 1011 closeDevice(); 1012 } 1013 } 1014 } 1015 1016 /** 1017 * Test AE mode ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY. 
1018 */ 1019 @Test 1020 @RequiresFlagsEnabled(Flags.FLAG_CAMERA_AE_MODE_LOW_LIGHT_BOOST) testAeModeOnLowLightBoostBrightnessPriority()1021 public void testAeModeOnLowLightBoostBrightnessPriority() throws Exception { 1022 for (String id : getCameraIdsUnderTest()) { 1023 try { 1024 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1025 if (!staticInfo.isAeModeLowLightBoostSupported()) { 1026 Log.i(TAG, "Camera " + id + " does not have AE mode " 1027 + "ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY, skipping"); 1028 continue; 1029 } 1030 openDevice(id); 1031 testAeModeOnLowLightBoostBrightnessPriorityTestByCamera(); 1032 } finally { 1033 closeDevice(); 1034 } 1035 } 1036 } 1037 1038 /** 1039 * Test settings override controls. 1040 */ 1041 @Test testSettingsOverrides()1042 public void testSettingsOverrides() throws Exception { 1043 for (String id : getCameraIdsUnderTest()) { 1044 try { 1045 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1046 if (!staticInfo.isColorOutputSupported()) { 1047 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1048 continue; 1049 } 1050 if (!staticInfo.isZoomSettingsOverrideSupported()) { 1051 Log.i(TAG, "Camera " + id + " does not support zoom overrides, skipping"); 1052 continue; 1053 } 1054 openDevice(id); 1055 settingsOverrideTestByCamera(); 1056 } finally { 1057 closeDevice(); 1058 } 1059 } 1060 } 1061 1062 // TODO: add 3A state machine test. 1063 1064 /** 1065 * Per camera dynamic black and white level test. 1066 */ dynamicBlackWhiteLevelTestByCamera()1067 private void dynamicBlackWhiteLevelTestByCamera() throws Exception { 1068 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1069 SimpleImageReaderListener imageListener = null; 1070 CaptureRequest.Builder previewBuilder = 1071 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1072 CaptureRequest.Builder rawBuilder = null; 1073 Size previewSize = 1074 getMaxPreviewSize(mCamera.getId(), mCameraManager, 1075 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 1076 Size rawSize = null; 1077 boolean canCaptureBlackRaw = 1078 mStaticInfo.isCapabilitySupported( 1079 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) && 1080 mStaticInfo.isOpticalBlackRegionSupported(); 1081 if (canCaptureBlackRaw) { 1082 // Capture Raw16, then calculate the optical black, and use it to check with the dynamic 1083 // black level. 
1084 rawBuilder = 1085 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 1086 rawSize = mStaticInfo.getRawDimensChecked(); 1087 imageListener = new SimpleImageReaderListener(); 1088 prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize, 1089 resultListener, imageListener); 1090 } else { 1091 startPreview(previewBuilder, previewSize, resultListener); 1092 } 1093 1094 // Capture a sequence of frames with different sensitivities and validate the black/white 1095 // level values 1096 int[] sensitivities = getSensitivityTestValuesSorted(); 1097 float[][] dynamicBlackLevels = new float[sensitivities.length][]; 1098 int[] dynamicWhiteLevels = new int[sensitivities.length]; 1099 float[][] opticalBlackLevels = new float[sensitivities.length][]; 1100 for (int i = 0; i < sensitivities.length; i++) { 1101 CaptureResult result = null; 1102 if (canCaptureBlackRaw) { 1103 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1104 CaptureRequest rawRequest = rawBuilder.build(); 1105 mSession.capture(rawRequest, resultListener, mHandler); 1106 result = resultListener.getCaptureResultForRequest(rawRequest, 1107 NUM_RESULTS_WAIT_TIMEOUT); 1108 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS); 1109 1110 // Get max (area-wise) optical black region 1111 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get( 1112 CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS); 1113 Rect maxRegion = opticalBlackRegions[0]; 1114 for (Rect region : opticalBlackRegions) { 1115 if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) { 1116 maxRegion = region; 1117 } 1118 } 1119 1120 // Get average black pixel values in the region (region is multiple of 2x2) 1121 Image.Plane rawPlane = rawImage.getPlanes()[0]; 1122 ByteBuffer rawBuffer = rawPlane.getBuffer(); 1123 float[] avgBlackLevels = {0, 0, 0, 0}; 1124 final int rowSize = rawPlane.getRowStride(); 1125 final int bytePerPixel = rawPlane.getPixelStride(); 1126 if (VERBOSE) { 1127 Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " + 1128 rawPlane.getRowStride()); 1129 } 1130 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) { 1131 for (int col = maxRegion.left; col < maxRegion.right; col += 2) { 1132 int startOffset = row * rowSize + col * bytePerPixel; 1133 avgBlackLevels[0] += rawBuffer.getShort(startOffset); 1134 avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel); 1135 startOffset += rowSize; 1136 avgBlackLevels[2] += rawBuffer.getShort(startOffset); 1137 avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel); 1138 } 1139 } 1140 int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2); 1141 for (int m = 0; m < avgBlackLevels.length; m++) { 1142 avgBlackLevels[m] /= numBlackBlocks; 1143 } 1144 opticalBlackLevels[i] = avgBlackLevels; 1145 1146 if (VERBOSE) { 1147 Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s", 1148 sensitivities[i], Arrays.toString(avgBlackLevels))); 1149 } 1150 1151 rawImage.close(); 1152 } else { 1153 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1154 CaptureRequest previewRequest = previewBuilder.build(); 1155 mSession.capture(previewRequest, resultListener, mHandler); 1156 result = resultListener.getCaptureResultForRequest(previewRequest, 1157 NUM_RESULTS_WAIT_TIMEOUT); 1158 } 1159 1160 dynamicBlackLevels[i] = getValueNotNull(result, 1161 CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL); 1162 dynamicWhiteLevels[i] = 
getValueNotNull(result, 1163 CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL); 1164 } 1165 1166 if (VERBOSE) { 1167 Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities)); 1168 Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)); 1169 Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels)); 1170 if (canCaptureBlackRaw) { 1171 Log.v(TAG, "Optical black level results " + 1172 Arrays.deepToString(opticalBlackLevels)); 1173 } 1174 } 1175 1176 // check the dynamic black level against global black level. 1177 // Implicit guarantee: if the dynamic black level is supported, fixed black level must be 1178 // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions). 1179 BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get( 1180 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN); 1181 int[] fixedBlackLevels = new int[4]; 1182 int fixedWhiteLevel = mStaticInfo.getCharacteristics().get( 1183 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL); 1184 blackPattern.copyTo(fixedBlackLevels, 0); 1185 float maxBlackDeviation = 0; 1186 int maxWhiteDeviation = 0; 1187 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1188 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1189 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) { 1190 maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]); 1191 } 1192 } 1193 if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) { 1194 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel); 1195 } 1196 } 1197 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level" 1198 + " exceed threshold." 1199 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels), 1200 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation); 1201 mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceed threshold." 1202 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels), 1203 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, 1204 (float)maxWhiteDeviation); 1205 1206 // Validate against optical black levels if it is available 1207 if (canCaptureBlackRaw) { 1208 maxBlackDeviation = 0; 1209 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1210 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1211 if (maxBlackDeviation < 1212 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) { 1213 maxBlackDeviation = 1214 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]); 1215 } 1216 } 1217 } 1218 1219 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black" 1220 + " exceed threshold." 
1221 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels) 1222 + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels), 1223 fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN, 1224 maxBlackDeviation); 1225 } 1226 } 1227 noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges)1228 private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 1229 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1230 CaptureRequest.Builder requestBuilder = 1231 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1232 int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked(); 1233 1234 for (int mode : availableModes) { 1235 requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode); 1236 1237 // Test that OFF and FAST mode should not slow down the frame rate. 1238 if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF || 1239 mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) { 1240 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 1241 } 1242 1243 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1244 startPreview(requestBuilder, maxPrevSize, resultListener); 1245 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler); 1246 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1247 1248 verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode, 1249 resultListener, NUM_FRAMES_VERIFIED); 1250 } 1251 1252 stopPreview(); 1253 } 1254 focusDistanceTestByCamera()1255 private void focusDistanceTestByCamera() throws Exception { 1256 CaptureRequest.Builder requestBuilder = 1257 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1258 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF); 1259 int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked(); 1260 float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED; 1261 if (calibrationStatus == 1262 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1263 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED; 1264 } else if (calibrationStatus == 1265 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) { 1266 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE; 1267 } 1268 1269 // Test changing focus distance with repeating request 1270 focusDistanceTestRepeating(requestBuilder, errorMargin); 1271 1272 if (calibrationStatus == 1273 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1274 // Test changing focus distance with burst request 1275 focusDistanceTestBurst(requestBuilder, errorMargin); 1276 } 1277 } 1278 verifyFocusRange(CaptureResult result, float focusDistance)1279 private void verifyFocusRange(CaptureResult result, float focusDistance) { 1280 if (PropertyUtil.getVendorApiLevel() < 33) { 1281 // Skip, as this only applies to UDC and above 1282 if (VERBOSE) { 1283 Log.v(TAG, "Skipping FOCUS_RANGE verification due to API level"); 1284 } 1285 return; 1286 } 1287 1288 Pair<Float, Float> focusRange = result.get(CaptureResult.LENS_FOCUS_RANGE); 1289 if (focusRange != null) { 1290 // Prevent differences in floating point precision between manual request and HAL 1291 // result, some margin need to be considered for focusRange.near and far check 1292 float focusRangeNear = focusRange.first * (1.0f + FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1293 float focusRangeFar = focusRange.second * (1.0f - FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1294 1295 
mCollector.expectLessOrEqual("Focus distance should be less than or equal to " 1296 + "FOCUS_RANGE.near (with margin)", focusRangeNear, focusDistance); 1297 mCollector.expectGreaterOrEqual("Focus distance should be greater than or equal to " 1298 + "FOCUS_RANGE.far (with margin)", focusRangeFar, focusDistance); 1299 } else if (VERBOSE) { 1300 Log.v(TAG, "FOCUS_RANGE undefined, skipping verification"); 1301 } 1302 } 1303 focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, float errorMargin)1304 private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, 1305 float errorMargin) throws Exception { 1306 CaptureRequest request; 1307 float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0); 1308 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1309 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1310 startPreview(requestBuilder, maxPrevSize, resultListener); 1311 1312 float[] resultDistances = new float[testDistances.length]; 1313 int[] resultLensStates = new int[testDistances.length]; 1314 1315 // Collect results 1316 for (int i = 0; i < testDistances.length; i++) { 1317 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1318 request = requestBuilder.build(); 1319 resultListener = new SimpleCaptureCallback(); 1320 mSession.setRepeatingRequest(request, resultListener, mHandler); 1321 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1322 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1323 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1324 CaptureResult result = resultListener.getCaptureResultForRequest(request, 1325 NUM_RESULTS_WAIT_TIMEOUT); 1326 1327 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1328 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1329 1330 verifyFocusRange(result, resultDistances[i]); 1331 1332 if (VERBOSE) { 1333 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i] 1334 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1335 } 1336 } 1337 1338 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1339 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0, 1340 errorMargin); 1341 1342 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1343 1344 // Test hyperfocal distance optionally 1345 float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1346 if (hyperFocalDistance > 0) { 1347 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance); 1348 request = requestBuilder.build(); 1349 resultListener = new SimpleCaptureCallback(); 1350 mSession.setRepeatingRequest(request, resultListener, mHandler); 1351 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1352 1353 // Then wait for the lens.state to be stationary. 
1354 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1355 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1356 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1357 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1358 mCollector.expectInRange("Focus distance for hyper focal should be close enough to" + 1359 " requested value", focusDistance, 1360 hyperFocalDistance * (1.0f - errorMargin), 1361 hyperFocalDistance * (1.0f + errorMargin)); 1362 } 1363 } 1364 } 1365 focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, float errorMargin)1366 private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, 1367 float errorMargin) throws Exception { 1368 1369 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1370 float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT, 1371 NUM_FOCUS_DISTANCES_REPEAT); 1372 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1373 startPreview(requestBuilder, maxPrevSize, resultListener); 1374 1375 float[] resultDistances = new float[testDistances.length]; 1376 int[] resultLensStates = new int[testDistances.length]; 1377 1378 final int maxPipelineDepth = mStaticInfo.getCharacteristics().get( 1379 CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH); 1380 1381 // Move lens to starting position, and wait for the lens.state to be stationary. 1382 CaptureRequest request; 1383 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]); 1384 request = requestBuilder.build(); 1385 mSession.setRepeatingRequest(request, resultListener, mHandler); 1386 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1387 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1388 1389 // Submit burst of requests with different focus distances 1390 List<CaptureRequest> burst = new ArrayList<>(); 1391 for (int i = 0; i < testDistances.length; i ++) { 1392 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1393 burst.add(requestBuilder.build()); 1394 } 1395 mSession.captureBurst(burst, resultListener, mHandler); 1396 1397 for (int i = 0; i < testDistances.length; i++) { 1398 CaptureResult result = resultListener.getCaptureResultForRequest( 1399 burst.get(i), maxPipelineDepth+1); 1400 1401 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1402 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1403 1404 verifyFocusRange(result, resultDistances[i]); 1405 1406 if (VERBOSE) { 1407 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i] 1408 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1409 } 1410 } 1411 1412 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1413 /*ascendingOrder*/true, /*noOvershoot*/true, 1414 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT, 1415 errorMargin); 1416 1417 } 1418 1419 /** 1420 * Verify focus distance control. 1421 * 1422 * Assumption: 1423 * - First repeatStart+1 elements of requestedDistances share the same value 1424 * - Last repeatEnd+1 elements of requestedDistances share the same value 1425 * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder. 1426 * - Focuser is at requestedDistances[0] at the beginning of the test. 
1427 * 1428 * @param requestedDistances The requested focus distances 1429 * @param resultDistances The result focus distances 1430 * @param lensStates The result lens states 1431 * @param ascendingOrder The order of the expected focus distance request/output 1432 * @param noOvershoot Assert that focus control doesn't overshoot the requested value 1433 * @param repeatStart The number of times the starting focus distance is repeated 1434 * @param repeatEnd The number of times the ending focus distance is repeated 1435 * @param errorMargin The error margin between request and result 1436 */ verifyFocusDistance(float[] requestedDistances, float[] resultDistances, int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, int repeatEnd, float errorMargin)1437 private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances, 1438 int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, 1439 int repeatEnd, float errorMargin) { 1440 1441 float minValue = 0; 1442 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 1443 float hyperfocalDistance = 0; 1444 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1445 hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1446 } 1447 1448 // Verify lens and focus distance do not change for first repeatStart 1449 // results. 1450 for (int i = 0; i < repeatStart; i ++) { 1451 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1452 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1453 float marginMax = 1454 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1455 1456 mCollector.expectEquals("Lens moves even though focus_distance didn't change", 1457 lensStates[i], CaptureResult.LENS_STATE_STATIONARY); 1458 if (noOvershoot) { 1459 mCollector.expectInRange("Focus distance in result should be close enough to " + 1460 "requested value", resultDistances[i], marginMin, marginMax); 1461 } 1462 mCollector.expectInRange("Result focus distance is out of range", 1463 resultDistances[i], minValue, maxValue); 1464 } 1465 1466 for (int i = repeatStart; i < resultDistances.length-1; i ++) { 1467 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1468 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1469 float marginMax = 1470 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1471 if (noOvershoot) { 1472 // Result focus distance shouldn't overshoot the request 1473 boolean condition; 1474 if (ascendingOrder) { 1475 condition = resultDistances[i] <= marginMax; 1476 } else { 1477 condition = resultDistances[i] >= marginMin; 1478 } 1479 mCollector.expectTrue(String.format( 1480 "Lens shouldn't move past request focus distance. result " + 1481 resultDistances[i] + " vs target of " + 1482 (ascendingOrder ? marginMax : marginMin)), condition); 1483 } 1484 1485 // Verify monotonically increased focus distance setting 1486 boolean condition; 1487 float compareDistance = resultDistances[i+1] - resultDistances[i]; 1488 if (i < resultDistances.length-1-repeatEnd) { 1489 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0); 1490 } else { 1491 condition = (ascendingOrder ? 
compareDistance >= 0 : compareDistance <= 0);
1492 }
1493 mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
1494 + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
1495 + lensStates[i+1] + "] monotonicity is broken"), condition);
1496 }
1497 
1498 mCollector.expectTrue(String.format("All values of this array are equal: " +
1499 resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
1500 resultDistances[0] != resultDistances[resultDistances.length-1]);
1501 
1502 // Verify lens moved to destination location.
1503 mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
1504 " for minFocusDistance should be close enough to requested value " +
1505 requestedDistances[requestedDistances.length-1],
1506 resultDistances[resultDistances.length-1],
1507 requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
1508 requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
1509 }
1510 
1511 /**
1512 * Verify edge mode control results for fpsRanges
1513 */
1514 private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
1515 Size maxPrevSize = mOrderedPreviewSizes.get(0);
1516 int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
1517 CaptureRequest.Builder requestBuilder =
1518 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1519 
1520 for (int mode : edgeModes) {
1521 requestBuilder.set(CaptureRequest.EDGE_MODE, mode);
1522 
1523 // Test that OFF and FAST modes do not slow down the frame rate.
1524 if (mode == CaptureRequest.EDGE_MODE_OFF ||
1525 mode == CaptureRequest.EDGE_MODE_FAST) {
1526 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
1527 }
1528 
1529 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1530 startPreview(requestBuilder, maxPrevSize, resultListener);
1531 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
1532 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
1533 
1534 verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
1535 NUM_FRAMES_VERIFIED);
1536 }
1537 
1538 stopPreview();
1539 }
1540 
1541 /**
1542 * Test color correction controls.
1543 *
1544 * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
1545 * the unit gain and identity transform.</p>
1546 */
1547 private void colorCorrectionTestByCamera() throws Exception {
1548 CaptureRequest request;
1549 CaptureResult result;
1550 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
1551 updatePreviewSurface(maxPreviewSz);
1552 CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
1553 CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
1554 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1555 
1556 startPreview(previewRequestBuilder, maxPreviewSz, listener);
1557 
1558 // Default preview result should give valid color correction metadata.
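// Note on the TRANSFORM_MATRIX case exercised below: unit RGGB gains plus a 3x3 identity
// transform make color correction a pass-through, so the checks only verify that the result
// echoes the requested gains/transform (a reading of the checks below, not an additional
// requirement).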
1559 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1560 validateColorCorrectionResult(result, 1561 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE)); 1562 int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; 1563 // TRANSFORM_MATRIX mode 1564 // Only test unit gain and identity transform 1565 List<Integer> availableControlModes = Arrays.asList( 1566 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked())); 1567 List<Integer> availableAwbModes = Arrays.asList( 1568 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked())); 1569 boolean isManualCCSupported = 1570 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) || 1571 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF); 1572 if (isManualCCSupported) { 1573 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1574 // Only manual AWB mode is supported 1575 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1576 CaptureRequest.CONTROL_MODE_AUTO); 1577 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, 1578 CaptureRequest.CONTROL_AWB_MODE_OFF); 1579 } else { 1580 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode. 1581 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1582 CaptureRequest.CONTROL_MODE_OFF); 1583 } 1584 1585 RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f); 1586 1587 ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform( 1588 new Rational[] { 1589 ONE_R, ZERO_R, ZERO_R, 1590 ZERO_R, ONE_R, ZERO_R, 1591 ZERO_R, ZERO_R, ONE_R 1592 }); 1593 1594 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode); 1595 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN); 1596 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM); 1597 request = manualRequestBuilder.build(); 1598 mSession.capture(request, listener, mHandler); 1599 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1600 RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS); 1601 ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM); 1602 validateColorCorrectionResult(result, colorCorrectionMode); 1603 mCollector.expectEquals("control mode result/request mismatch", 1604 CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE)); 1605 mCollector.expectEquals("Color correction gain result/request mismatch", 1606 UNIT_GAIN, gains); 1607 mCollector.expectEquals("Color correction gain result/request mismatch", 1608 IDENTITY_TRANSFORM, transform); 1609 1610 } 1611 1612 // FAST mode 1613 colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST; 1614 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1615 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode); 1616 request = manualRequestBuilder.build(); 1617 mSession.capture(request, listener, mHandler); 1618 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1619 validateColorCorrectionResult(result, colorCorrectionMode); 1620 mCollector.expectEquals("control mode result/request mismatch", 1621 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE)); 1622 1623 // HIGH_QUALITY mode 1624 colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY; 1625 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 
CaptureRequest.CONTROL_MODE_AUTO); 1626 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode); 1627 request = manualRequestBuilder.build(); 1628 mSession.capture(request, listener, mHandler); 1629 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1630 validateColorCorrectionResult(result, colorCorrectionMode); 1631 mCollector.expectEquals("control mode result/request mismatch", 1632 CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE)); 1633 } 1634 validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode)1635 private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) { 1636 final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0); 1637 final int TRANSFORM_SIZE = 9; 1638 Rational[] zeroTransform = new Rational[TRANSFORM_SIZE]; 1639 Arrays.fill(zeroTransform, ZERO_R); 1640 final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform); 1641 1642 RggbChannelVector resultGain; 1643 if ((resultGain = mCollector.expectKeyValueNotNull(result, 1644 CaptureResult.COLOR_CORRECTION_GAINS)) != null) { 1645 mCollector.expectKeyValueNotEquals(result, 1646 CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS); 1647 } 1648 1649 ColorSpaceTransform resultTransform; 1650 if ((resultTransform = mCollector.expectKeyValueNotNull(result, 1651 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) { 1652 mCollector.expectKeyValueNotEquals(result, 1653 CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM); 1654 } 1655 1656 mCollector.expectEquals("color correction mode result/request mismatch", 1657 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 1658 } 1659 1660 /** 1661 * Test that flash can be turned off successfully with a given initial and final AE_CONTROL 1662 * states. 1663 * 1664 * This function expects that initialAeControl and flashOffAeControl will not be either 1665 * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF 1666 * 1667 * @param listener The Capture listener that is used to wait for capture result 1668 * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with. 1669 * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for 1670 * TEMPLATE_PREVIEW repeating requests. 
1671 */ flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, int initialAeControl, int flashOffAeControl)1672 private void flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, 1673 int initialAeControl, int flashOffAeControl) throws Exception { 1674 CaptureResult result; 1675 final int NUM_FLASH_REQUESTS_TESTED = 10; 1676 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1677 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1678 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl); 1679 1680 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1681 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1682 1683 // Turn on torch using FLASH_MODE_TORCH 1684 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 1685 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1686 CaptureRequest torchOnRequest = requestBuilder.build(); 1687 mSession.setRepeatingRequest(torchOnRequest, listener, mHandler); 1688 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH); 1689 result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT); 1690 // Test that the flash actually turned on continuously. 1691 mCollector.expectEquals("Flash state result must be FIRED", CaptureResult.FLASH_STATE_FIRED, 1692 result.get(CaptureResult.FLASH_STATE)); 1693 mSession.stopRepeating(); 1694 // Turn off the torch 1695 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl); 1696 // TODO: jchowdhary@, b/130323585, this line can be removed. 1697 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1698 int numAllowedTransitionStates = NUM_PARTIAL_FRAMES_NPFC; 1699 if (mStaticInfo.isPerFrameControlSupported()) { 1700 numAllowedTransitionStates = NUM_PARTIAL_FRAMES_PFC; 1701 1702 } 1703 // We submit 2 * numAllowedTransitionStates + 1 requests since we have two torch mode 1704 // transitions. The additional request is to check for at least 1 expected (FIRED / READY) 1705 // state. 1706 int numTorchTestSamples = 2 * numAllowedTransitionStates + 1; 1707 CaptureRequest flashOffRequest = requestBuilder.build(); 1708 int flashModeOffRequests = captureRequestsSynchronizedBurst(flashOffRequest, 1709 numTorchTestSamples, listener, mHandler); 1710 // Turn it on again. 1711 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1712 // We need to have CONTROL_AE_MODE be either CONTROL_AE_MODE_ON or CONTROL_AE_MODE_OFF to 1713 // turn the torch on again. 
1714 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1715 CaptureRequest flashModeTorchRequest = requestBuilder.build();
1716 int flashModeTorchRequests = captureRequestsSynchronizedBurst(flashModeTorchRequest,
1717 numTorchTestSamples, listener, mHandler);
1718 
1719 CaptureResult[] torchStateResults =
1720 new CaptureResult[flashModeTorchRequests + flashModeOffRequests];
1721 Arrays.fill(torchStateResults, null);
1722 int i = 0;
1723 for (; i < flashModeOffRequests; i++) {
1724 torchStateResults[i] =
1725 listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
1726 mCollector.expectNotEquals("Result for flashModeOff request is null",
1727 torchStateResults[i], null);
1728 }
1729 for (int j = i; j < torchStateResults.length; j++) {
1730 torchStateResults[j] =
1731 listener.getCaptureResultForRequest(flashModeTorchRequest,
1732 NUM_RESULTS_WAIT_TIMEOUT);
1733 mCollector.expectNotEquals("Result for flashModeTorch request is null",
1734 torchStateResults[j], null);
1735 }
1736 if (isLegacy) {
1737 // For LEGACY devices, flash state is null for all situations except:
1738 // android.control.aeMode == ON_ALWAYS_FLASH, where flash.state will be FIRED
1739 // android.flash.mode == TORCH, where flash.state will be FIRED
1740 testLegacyTorchStates(torchStateResults, 0, flashModeOffRequests - 1, flashOffRequest);
1741 testLegacyTorchStates(torchStateResults, flashModeOffRequests,
1742 torchStateResults.length -1,
1743 flashModeTorchRequest);
1744 } else {
1745 checkTorchStates(torchStateResults, numAllowedTransitionStates, flashModeOffRequests,
1746 flashModeTorchRequests);
1747 }
1748 }
1749 
1750 private void testLegacyTorchStates(CaptureResult []torchStateResults, int beg, int end,
1751 CaptureRequest request) {
1752 for (int i = beg; i <= end; i++) {
1753 Integer requestControlAeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
1754 Integer requestFlashMode = request.get(CaptureRequest.FLASH_MODE);
1755 Integer resultFlashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1756 if (requestControlAeMode == CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
1757 requestFlashMode == CaptureRequest.FLASH_MODE_TORCH) {
1758 mCollector.expectEquals("For LEGACY devices, flash state must be FIRED when " +
1759 "CONTROL_AE_MODE == CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE == " +
1760 "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1761 requestFlashMode, CaptureResult.FLASH_STATE_FIRED, resultFlashState);
1762 continue;
1763 }
1764 mCollector.expectTrue("For LEGACY devices, flash state must be null when " +
1765 "CONTROL_AE_MODE != CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE != " +
1766 "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
1767 requestFlashMode, resultFlashState == null);
1768 }
1769 }
1770 // We check that torch states appear in the order expected. We don't necessarily know how many
1771 // times each state might appear; however, we make sure that the states do not appear out of
1772 // order.
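// For example, with stateOrder = [PARTIAL, READY], a result sequence PARTIAL, PARTIAL, READY,
// READY is accepted, while PARTIAL, READY, PARTIAL is rejected because the index into stateOrder
// would have to decrease (an illustration of checkTorchTransitionStates below, not an extra
// requirement).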
1773 private void checkTorchTransitionStates(CaptureResult []torchStateResults, int beg, int end,
1774 List<Integer> stateOrder, boolean isTurningOff) {
1775 Integer flashState;
1776 Integer curIndex = 0;
1777 for (int i = beg; i <= end; i++) {
1778 flashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
1779 int index = stateOrder.indexOf(flashState);
1780 mCollector.expectNotEquals("Invalid state " + flashState + " not in expected list " +
1781 stateOrder, index, -1);
1782 mCollector.expectGreaterOrEqual("state " + flashState + " index " + index +
1783 " is expected to be >= " + curIndex,
1784 curIndex, index);
1785 curIndex = index;
1786 }
1787 }
1788 
1789 private void checkTorchStates(CaptureResult []torchResults, int numAllowedTransitionStates,
1790 int numTorchOffSamples, int numTorchOnSamples) {
1791 // We test for flash states from request:
1792 // Request: O(0) O(1) O(2) O(n)....O(nOFF) T(0) T(1) T(2) ....T(n) .... T(nON)
1793 // Valid Result : P/R P/R P/R R R R...P/R P/R P/F P/F P/F F F
1794 // For the FLASH_STATE_OFF requests, once FLASH_STATE READY has been seen, for the
1795 // transition states while switching the torch off, it must not transition to
1796 // FLASH_STATE_PARTIAL again till the next transition period which turns the torch on.
1797 // P - FLASH_STATE_PARTIAL
1798 // R - FLASH_STATE_READY
1799 // F - FLASH_STATE_FIRED
1800 // O(k) - kth FLASH_MODE_OFF request
1801 // T(k) - kth FLASH_MODE_TORCH request
1802 // nOFF - number of torch off samples
1803 // nON - number of torch on samples
1804 Integer flashState;
1805 // Check on -> off transition states
1806 List<Integer> onToOffStateOrderList = new ArrayList<Integer>();
1807 onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1808 onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
1809 checkTorchTransitionStates(torchResults, 0, numAllowedTransitionStates,
1810 onToOffStateOrderList, true);
1811 // The next frames (before the transition) must have their flash state set to FLASH_STATE_READY
1812 for (int i = numAllowedTransitionStates + 1;
1813 i < numTorchOffSamples - numAllowedTransitionStates; i++) {
1814 flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
1815 mCollector.expectEquals("flash state result must be READY",
1816 CaptureResult.FLASH_STATE_READY, flashState);
1817 }
1818 // check off -> on transition states, before the FLASH_MODE_TORCH request was sent
1819 List<Integer> offToOnPreStateOrderList = new ArrayList<Integer>();
1820 offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
1821 offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1822 checkTorchTransitionStates(torchResults,
1823 numTorchOffSamples - numAllowedTransitionStates, numTorchOffSamples - 1,
1824 offToOnPreStateOrderList, false);
1825 // check off -> on transition states
1826 List<Integer> offToOnPostStateOrderList = new ArrayList<Integer>();
1827 offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
1828 offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_FIRED);
1829 checkTorchTransitionStates(torchResults,
1830 numTorchOffSamples, numTorchOffSamples + numAllowedTransitionStates,
1831 offToOnPostStateOrderList, false);
1832 // check on states after off -> on transition
1833 // The next frames
must have its flash state as FLASH_STATE_FIRED 1834 for (int i = numTorchOffSamples + numAllowedTransitionStates + 1; 1835 i < torchResults.length - 1; i++) { 1836 flashState = torchResults[i].get(CaptureResult.FLASH_STATE); 1837 mCollector.expectEquals("flash state result must be FIRED for frame " + i, 1838 CaptureRequest.FLASH_STATE_FIRED, flashState); 1839 } 1840 } 1841 1842 /** 1843 * Test flash mode control by AE mode. 1844 * <p> 1845 * Only allow AE mode ON or OFF, because other AE mode could run into conflict with 1846 * flash manual control. This function expects the camera to already have an active 1847 * repeating request and be sending results to the listener. 1848 * </p> 1849 * 1850 * @param listener The Capture listener that is used to wait for capture result 1851 * @param aeMode The AE mode for flash to test with 1852 */ flashTestByAeMode(SimpleCaptureCallback listener, int aeMode)1853 private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception { 1854 CaptureResult result; 1855 final int NUM_FLASH_REQUESTS_TESTED = 10; 1856 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1857 1858 if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) { 1859 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode); 1860 } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 1861 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY); 1862 } else { 1863 throw new IllegalArgumentException("This test only works when AE mode is ON or OFF"); 1864 } 1865 1866 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1867 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1868 1869 // For camera that doesn't have flash unit, flash state should always be UNAVAILABLE. 1870 if (mStaticInfo.getFlashInfoChecked() == false) { 1871 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 1872 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS); 1873 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE" 1874 + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE, 1875 result.get(CaptureResult.FLASH_STATE)); 1876 } 1877 1878 return; 1879 } 1880 1881 // Test flash SINGLE mode control. Wait for flash state to be READY first. 1882 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 1883 waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY, 1884 NUM_RESULTS_WAIT_TIMEOUT); 1885 } // else the settings were already waited on earlier 1886 1887 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 1888 CaptureRequest flashSinglerequest = requestBuilder.build(); 1889 1890 int flashModeSingleRequests = captureRequestsSynchronized( 1891 flashSinglerequest, listener, mHandler); 1892 waitForNumResults(listener, flashModeSingleRequests - 1); 1893 result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT); 1894 // Result mode must be SINGLE, state must be FIRED. 1895 mCollector.expectEquals("Flash mode result must be SINGLE", 1896 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE)); 1897 mCollector.expectEquals("Flash state result must be FIRED", 1898 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 1899 1900 // Test flash TORCH mode control. 
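// Expected FLASH_STATE sequence over the torch burst: zero or more PARTIAL results while the
// torch ramps up, then FIRED, with a possible trailing PARTIAL as it ramps down at the end
// (a summary of the TorchSeqState checks below, not an additional requirement).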
1901 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1902 CaptureRequest torchRequest = requestBuilder.build(); 1903 1904 int flashModeTorchRequests = captureRequestsSynchronized(torchRequest, 1905 NUM_FLASH_REQUESTS_TESTED, listener, mHandler); 1906 waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED); 1907 1908 // Verify the results 1909 TorchSeqState state = TorchSeqState.RAMPING_UP; 1910 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 1911 result = listener.getCaptureResultForRequest(torchRequest, 1912 NUM_RESULTS_WAIT_TIMEOUT); 1913 int flashMode = result.get(CaptureResult.FLASH_MODE); 1914 int flashState = result.get(CaptureResult.FLASH_STATE); 1915 // Result mode must be TORCH 1916 mCollector.expectEquals("Flash mode result " + i + " must be TORCH", 1917 CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE)); 1918 if (state == TorchSeqState.RAMPING_UP && 1919 flashState == CaptureResult.FLASH_STATE_FIRED) { 1920 state = TorchSeqState.FIRED; 1921 } else if (state == TorchSeqState.FIRED && 1922 flashState == CaptureResult.FLASH_STATE_PARTIAL) { 1923 state = TorchSeqState.RAMPING_DOWN; 1924 } 1925 1926 if (i == 0 && mStaticInfo.isPerFrameControlSupported()) { 1927 mCollector.expectTrue( 1928 "Per frame control device must enter FIRED state on first torch request", 1929 state == TorchSeqState.FIRED); 1930 } 1931 1932 if (state == TorchSeqState.FIRED) { 1933 mCollector.expectEquals("Flash state result " + i + " must be FIRED", 1934 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 1935 } else { 1936 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL", 1937 CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE)); 1938 } 1939 } 1940 mCollector.expectTrue("Torch state FIRED never seen", 1941 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN); 1942 1943 // Test flash OFF mode control 1944 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1945 CaptureRequest flashOffrequest = requestBuilder.build(); 1946 1947 int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler); 1948 waitForNumResults(listener, flashModeOffRequests - 1); 1949 result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT); 1950 mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF, 1951 result.get(CaptureResult.FLASH_MODE)); 1952 } 1953 verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, int mode, boolean isAeManual, long requestExpTime)1954 private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, 1955 int mode, boolean isAeManual, long requestExpTime) throws Exception { 1956 // Skip the first a couple of frames as antibanding may not be fully up yet. 1957 final int NUM_FRAMES_SKIPPED = 5; 1958 for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) { 1959 listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1960 } 1961 1962 for (int i = 0; i < numFramesVerified; i++) { 1963 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1964 Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); 1965 assertNotNull("Exposure time shouldn't be null", resultExpTime); 1966 Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER); 1967 // Scene flicker result should be always available. 
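// Illustration of the anti-flickering adjustment verified further below (a sketch of the
// assumption, not an extra requirement): under a 50Hz source the flicker period is 10ms, so an
// auto exposure near 25ms is expected to be reported as a multiple of 10ms (e.g. 20ms); under
// 60Hz the period is roughly 8.3ms. Exposures shorter than one flicker period are expected to be
// left unadjusted.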
1968 assertNotNull("Scene flicker must not be null", flicker); 1969 assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE && 1970 flicker <= STATISTICS_SCENE_FLICKER_60HZ); 1971 1972 Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE); 1973 assertNotNull("antiBanding mode shouldn't be null", antiBandMode); 1974 assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode, 1975 antiBandMode == mode); 1976 if (isAeManual) { 1977 // First, round down not up, second, need close enough. 1978 validateExposureTime(requestExpTime, resultExpTime); 1979 return; 1980 } 1981 1982 long expectedExpTime = resultExpTime; // Default, no exposure adjustment. 1983 if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) { 1984 // result exposure time must be adjusted by 50Hz illuminant source. 1985 expectedExpTime = 1986 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 1987 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) { 1988 // result exposure time must be adjusted by 60Hz illuminant source. 1989 expectedExpTime = 1990 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 1991 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){ 1992 /** 1993 * Use STATISTICS_SCENE_FLICKER to tell the illuminant source 1994 * and do the exposure adjustment. 1995 */ 1996 expectedExpTime = resultExpTime; 1997 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) { 1998 expectedExpTime = 1999 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 2000 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) { 2001 expectedExpTime = 2002 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 2003 } 2004 } 2005 2006 if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) { 2007 mCollector.addMessage(String.format("Result exposure time %dns diverges too much" 2008 + " from expected exposure time %dns for mode %d when AE is auto", 2009 resultExpTime, expectedExpTime, mode)); 2010 } 2011 } 2012 } 2013 antiBandingTestByMode(Size size, int mode)2014 private void antiBandingTestByMode(Size size, int mode) 2015 throws Exception { 2016 if(VERBOSE) { 2017 Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId()); 2018 } 2019 CaptureRequest.Builder requestBuilder = 2020 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2021 2022 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode); 2023 2024 // Test auto AE mode anti-banding behavior 2025 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 2026 startPreview(requestBuilder, size, resultListener); 2027 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2028 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false, 2029 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK); 2030 2031 // Test manual AE mode anti-banding behavior 2032 // 65ms, must be supported by full capability devices. 
2033 final long TEST_MANUAL_EXP_TIME_NS = 65000000L; 2034 long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS); 2035 changeExposure(requestBuilder, manualExpTime); 2036 resultListener = new SimpleCaptureCallback(); 2037 startPreview(requestBuilder, size, resultListener); 2038 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2039 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true, 2040 manualExpTime); 2041 2042 stopPreview(); 2043 } 2044 2045 /** 2046 * Test the all available AE modes and AE lock. 2047 * <p> 2048 * For manual AE mode, test iterates through different sensitivities and 2049 * exposure times, validate the result exposure time correctness. For 2050 * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested. 2051 * For the rest of the AUTO mode, AE lock is tested. 2052 * </p> 2053 * 2054 * @param mode 2055 */ aeModeAndLockTestByMode(int mode)2056 private void aeModeAndLockTestByMode(int mode) 2057 throws Exception { 2058 switch (mode) { 2059 case CONTROL_AE_MODE_OFF: 2060 if (mStaticInfo.isCapabilitySupported( 2061 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 2062 // Test manual exposure control. 2063 aeManualControlTest(); 2064 } else { 2065 Log.w(TAG, 2066 "aeModeAndLockTestByMode - can't test AE mode OFF without " + 2067 "manual sensor control"); 2068 } 2069 break; 2070 case CONTROL_AE_MODE_ON: 2071 case CONTROL_AE_MODE_ON_AUTO_FLASH: 2072 case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: 2073 case CONTROL_AE_MODE_ON_ALWAYS_FLASH: 2074 case CONTROL_AE_MODE_ON_EXTERNAL_FLASH: 2075 // Test AE lock for above AUTO modes. 2076 aeAutoModeTestLock(mode); 2077 break; 2078 default: 2079 throw new UnsupportedOperationException("Unhandled AE mode " + mode); 2080 } 2081 } 2082 2083 /** 2084 * Test AE auto modes. 2085 * <p> 2086 * Use single request rather than repeating request to test AE lock per frame control. 2087 * </p> 2088 */ aeAutoModeTestLock(int mode)2089 private void aeAutoModeTestLock(int mode) throws Exception { 2090 CaptureRequest.Builder requestBuilder = 2091 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2092 if (mStaticInfo.isAeLockSupported()) { 2093 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 2094 } 2095 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode); 2096 configurePreviewOutput(requestBuilder); 2097 2098 final int MAX_NUM_CAPTURES_DURING_LOCK = 5; 2099 for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) { 2100 autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i); 2101 } 2102 } 2103 2104 /** 2105 * Issue multiple auto AE captures, then lock AE, validate the AE lock vs. 2106 * the first capture result after the AE lock. The right AE lock behavior is: 2107 * When it is locked, it locks to the current exposure value, and all subsequent 2108 * request with lock ON will have the same exposure value locked. 
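 * For example, if the first locked result reports a 10ms exposure at ISO 100, every later
 * result captured while the lock is held is expected to report the same 10ms / ISO 100 pair
 * (assuming the device can report its sensor settings).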
2109 */
2110 private void autoAeMultipleCapturesThenTestLock(
2111 CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock)
2112 throws Exception {
2113 if (numCapturesDuringLock < 1) {
2114 throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
2115 }
2116 if (VERBOSE) {
2117 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
2118 + aeMode + " with " + numCapturesDuringLock + " captures during lock");
2119 }
2120 
2121 final int NUM_CAPTURES_BEFORE_LOCK = 2;
2122 SimpleCaptureCallback listener = new SimpleCaptureCallback();
2123 
2124 CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
2125 boolean canSetAeLock = mStaticInfo.isAeLockSupported();
2126 
2127 // Reset the AE lock to OFF, since we are reusing this builder many times
2128 if (canSetAeLock) {
2129 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
2130 }
2131 
2132 // Just send several captures with auto AE, lock off.
2133 CaptureRequest request = requestBuilder.build();
2134 for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
2135 mSession.capture(request, listener, mHandler);
2136 }
2137 waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
2138 
2139 if (!canSetAeLock) {
2140 // Without AE lock, the remaining test items won't work
2141 return;
2142 }
2143 
2144 // Then fire several captures to lock the AE.
2145 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
2146 
2147 int requestCount = captureRequestsSynchronized(
2148 requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
2149 
2150 int[] sensitivities = new int[numCapturesDuringLock];
2151 long[] expTimes = new long[numCapturesDuringLock];
2152 Arrays.fill(sensitivities, -1);
2153 Arrays.fill(expTimes, -1L);
2154 
2155 // Get the AE-locked results and validate the exposure values.
2156 waitForNumResults(listener, requestCount - numCapturesDuringLock);
2157 for (int i = 0; i < resultsDuringLock.length; i++) {
2158 resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2159 }
2160 
2161 for (int i = 0; i < numCapturesDuringLock; i++) {
2162 mCollector.expectKeyValueEquals(
2163 resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
2164 }
2165 
2166 // Can't read manual sensor/exposure settings without manual sensor
2167 if (mStaticInfo.isCapabilitySupported(
2168 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
2169 int sensitivityLocked =
2170 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
2171 long expTimeLocked =
2172 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
2173 for (int i = 1; i < resultsDuringLock.length; i++) {
2174 mCollector.expectKeyValueEquals(
2175 resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
2176 mCollector.expectKeyValueEquals(
2177 resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
2178 }
2179 }
2180 }
2181 
2182 /**
2183 * Iterate through exposure times and sensitivities for manual AE control.
2184 * <p>
2185 * Use single request rather than repeating request to test manual exposure
2186 * value change per frame control.
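 * For example, with exposure times {E0, E1, E2} (ascending) and sensitivities {S0, S1, S2}
 * (ascending), the pairs exercised are (E0,S0), (E0,S1), (E0,S2), (E1,S0), (E1,S2), (E2,S0)
 * and (E2,S2); see the pairing logic in the method body below.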
2187 * </p> 2188 */ aeManualControlTest()2189 private void aeManualControlTest() 2190 throws Exception { 2191 CaptureRequest.Builder requestBuilder = 2192 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2193 configurePreviewOutput(requestBuilder); 2194 2195 // Warm up pipeline for more accurate timing 2196 SimpleCaptureCallback warmupListener = new SimpleCaptureCallback(); 2197 mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler); 2198 warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2199 2200 // Do manual captures 2201 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 2202 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2203 2204 long[] expTimesNs = getExposureTimeTestValuesSorted(); 2205 int[] sensitivities = getSensitivityTestValuesSorted(); 2206 2207 assertTrue(expTimesNs.length > 0); 2208 assertTrue(sensitivities.length > 0); 2209 2210 // For multiple exposure times, make smart combinations of exposure and sensitivity to 2211 // reduce test time and still have exhaustive coverage. 2212 List<Pair<Long, Integer>> exposureSensitivityTestValues = 2213 new ArrayList<Pair<Long, Integer>>(); 2214 2215 // Min exposure should be tested with all sensitivity values. 2216 for (int i = 0; i < sensitivities.length; i++) { 2217 exposureSensitivityTestValues.add( 2218 new Pair<Long, Integer>(expTimesNs[0], sensitivities[i])); 2219 } 2220 2221 // All other exposure values should be tested only with min and max sensitivity. 2222 for (int i = 1; i < expTimesNs.length; i++) { 2223 exposureSensitivityTestValues.add( 2224 new Pair<Long, Integer>(expTimesNs[i], sensitivities[0])); 2225 2226 if (sensitivities.length > 1) { 2227 exposureSensitivityTestValues.add( 2228 new Pair<Long, Integer>(expTimesNs[i], 2229 sensitivities[sensitivities.length - 1])); 2230 } 2231 } 2232 2233 // Submit single request at a time, then verify the result. 2234 for (int i = 0; i < exposureSensitivityTestValues.size(); i++) { 2235 long exposure = exposureSensitivityTestValues.get(i).first; 2236 int sensitivity = exposureSensitivityTestValues.get(i).second; 2237 2238 if (VERBOSE) { 2239 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " 2240 + sensitivity + ", exposure time " + exposure + "ns"); 2241 } 2242 2243 changeExposure(requestBuilder, exposure, sensitivity); 2244 mSession.capture(requestBuilder.build(), listener, mHandler); 2245 2246 // make sure timeout is long enough for long exposure time - add a 2x safety margin 2247 // to exposure time 2248 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * exposure / 1000000; 2249 CaptureResult result = listener.getCaptureResult(timeoutMs); 2250 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 2251 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 2252 validateExposureTime(exposure, resultExpTimeNs); 2253 validateSensitivity(sensitivity, resultSensitivity); 2254 validateFrameDurationForCapture(result); 2255 } 2256 mSession.stopRepeating(); 2257 2258 // TODO: Add another case to test where we can submit all requests, then wait for 2259 // results, which will hide the pipeline latency. this is not only faster, but also 2260 // test high speed per frame control and synchronization. 2261 } 2262 2263 2264 /** 2265 * Verify black level lock control. 
2266 */
2267 private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified,
2268 int maxLockOffCnt) throws Exception {
2269 int noLockCnt = 0;
2270 for (int i = 0; i < numFramesVerified; i++) {
2271 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2272 Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK);
2273 assertNotNull("Black level lock result shouldn't be null", blackLevelLock);
2274 
2275 // Count the lock == false results, which should occur at most maxLockOffCnt times.
2276 if (blackLevelLock == false) {
2277 noLockCnt++;
2278 }
2279 
2280 if (VERBOSE) {
2281 Log.v(TAG, "Black level lock result: " + blackLevelLock);
2282 }
2283 }
2284 assertTrue("Black level lock OFF occurs " + noLockCnt + " times, expect at most "
2285 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt);
2286 }
2287 
2288 /**
2289 * Verify shading map for different shading modes.
2290 */
2291 private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
2292 int shadingMode) throws Exception {
2293 
2294 for (int i = 0; i < numFramesVerified; i++) {
2295 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2296 mCollector.expectEquals("Shading mode result doesn't match request",
2297 shadingMode, result.get(CaptureResult.SHADING_MODE));
2298 LensShadingMap mapObj = result.get(
2299 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
2300 assertNotNull("Map object must not be null", mapObj);
2301 int numElementsInMap = mapObj.getGainFactorCount();
2302 float[] map = new float[numElementsInMap];
2303 mapObj.copyGainFactors(map, /*offset*/0);
2304 assertNotNull("Map must not be null", map);
2305 assertFalse(String.format(
2306 "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
2307 numElementsInMap >= MAX_SHADING_MAP_SIZE);
2308 assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
2309 MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);
2310 
2311 if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
2312 shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
2313 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
2314 // elements >= 1.0f
2315 
2316 int badValueCnt = 0;
2317 // Detect the bad values of the map data.
2318 for (int j = 0; j < numElementsInMap; j++) {
2319 if (Float.isNaN(map[j]) || map[j] < 1.0f) {
2320 badValueCnt++;
2321 }
2322 }
2323 assertEquals("Number of bad values in the map is " + badValueCnt + " out of "
2324 + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
2325 } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
2326 float[] unityMap = new float[numElementsInMap];
2327 Arrays.fill(unityMap, 1.0f);
2328 // shading mode is OFF, expect to receive a unity map.
2329 assertTrue("Result map " + Arrays.toString(map) + " must be a unity map",
2330 Arrays.equals(unityMap, map));
2331 }
2332 }
2333 }
2334 
2335 /**
2336 * Test face detection for a camera.
2337 */
2338 private void faceDetectionTestByCamera() throws Exception {
2339 int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();
2340 
2341 SimpleCaptureCallback listener;
2342 CaptureRequest.Builder requestBuilder =
2343 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2344 
2345 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2346 for (int mode : faceDetectModes) {
2347 requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
2348 if (VERBOSE) {
2349 Log.v(TAG, "Start testing face detection mode " + mode);
2350 }
2351 
2352 // Create a new listener for each run to avoid the results from one run spilling
2353 // into another run.
2354 listener = new SimpleCaptureCallback();
2355 startPreview(requestBuilder, maxPreviewSz, listener);
2356 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2357 verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
2358 }
2359 
2360 stopPreview();
2361 }
2362 
2363 /**
2364 * Verify face detection results for different face detection modes.
2365 *
2366 * @param listener The listener to get capture result
2367 * @param numFramesVerified Number of results to be verified
2368 * @param faceDetectionMode Face detection mode to be verified against
2369 */
2370 private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
2371 int faceDetectionMode) {
2372 for (int i = 0; i < numFramesVerified; i++) {
2373 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2374 mCollector.expectEquals("Result face detection mode should match the request",
2375 faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
2376 
2377 Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
2378 List<Integer> faceIds = new ArrayList<Integer>(faces.length);
2379 List<Integer> faceScores = new ArrayList<Integer>(faces.length);
2380 if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
2381 mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
2382 0, faces.length);
2383 } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
2384 for (Face face : faces) {
2385 mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
2386 faceScores.add(face.getScore());
2387 mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
2388 face.getId() == Face.ID_UNSUPPORTED);
2389 }
2390 } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
2391 if (VERBOSE) {
2392 Log.v(TAG, "Number of faces detected: " + faces.length);
2393 }
2394 
2395 for (Face face : faces) {
2396 Rect faceBound;
2397 boolean faceRectAvailable = mCollector.expectTrue("Face rectangle "
2398 + "shouldn't be null", face.getBounds() != null);
2399 if (!faceRectAvailable) {
2400 continue;
2401 }
2402 faceBound = face.getBounds();
2403 
2404 faceScores.add(face.getScore());
2405 faceIds.add(face.getId());
2406 
2407 mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
2408 face.getId() != Face.ID_UNSUPPORTED);
2409 boolean leftEyeAvailable =
2410 mCollector.expectTrue("Left eye position shouldn't be null",
2411 face.getLeftEyePosition() != null);
2412 boolean rightEyeAvailable =
2413 mCollector.expectTrue("Right eye position shouldn't be null",
2414 face.getRightEyePosition() != null);
2415 boolean mouthAvailable =
2416 mCollector.expectTrue("Mouth position shouldn't be null",
2417 face.getMouthPosition() !=
null);
2418 // Eyes/mouth position should be inside of the face rect.
2419 if (leftEyeAvailable) {
2420 Point leftEye = face.getLeftEyePosition();
2421 mCollector.expectTrue("Left eye " + leftEye + " should be"
2422 + " inside of face rect " + faceBound,
2423 faceBound.contains(leftEye.x, leftEye.y));
2424 }
2425 if (rightEyeAvailable) {
2426 Point rightEye = face.getRightEyePosition();
2427 mCollector.expectTrue("Right eye " + rightEye + " should be"
2428 + " inside of face rect " + faceBound,
2429 faceBound.contains(rightEye.x, rightEye.y));
2430 }
2431 if (mouthAvailable) {
2432 Point mouth = face.getMouthPosition();
2433 mCollector.expectTrue("Mouth " + mouth + " should be inside of"
2434 + " face rect " + faceBound,
2435 faceBound.contains(mouth.x, mouth.y));
2436 }
2437 }
2438 }
2439 mCollector.expectValuesInRange("Face scores are invalid", faceScores,
2440 Face.SCORE_MIN, Face.SCORE_MAX);
2441 mCollector.expectValuesUnique("Face ids are invalid", faceIds);
2442 }
2443 }
2444 
2445 /**
2446 * Test tone map mode and result by camera
2447 */
2448 private void toneMapTestByCamera() throws Exception {
2449 if (!mStaticInfo.isManualToneMapSupported()) {
2450 return;
2451 }
2452 
2453 CaptureRequest.Builder requestBuilder =
2454 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2455 int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
2456 // Test AUTO modes first. Note that FAST/HQ must either both be present or both be absent.
2457 for (int i = 0; i < toneMapModes.length; i++) {
2458 if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) {
2459 int tmpMode = toneMapModes[0];
2460 toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST;
2461 toneMapModes[i] = tmpMode;
2462 }
2463 if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) {
2464 int tmpMode = toneMapModes[1];
2465 toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY;
2466 toneMapModes[i] = tmpMode;
2467 }
2468 }
2469 for (int mode : toneMapModes) {
2470 if (VERBOSE) {
2471 Log.v(TAG, "Testing tonemap mode " + mode);
2472 }
2473 
2474 requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
2475 switch (mode) {
2476 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
2477 TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
2478 TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
2479 requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
2480 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2481 
2482 toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
2483 TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
2484 requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
2485 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2486 break;
2487 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
2488 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
2489 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2490 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
2491 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2492 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
2493 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2494 break;
2495 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
2496 requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
2497 CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
2498 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2499 requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
2500 CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
2501 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2502 break;
2503 default:
2504 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
2505 break;
2506 }
2507 }
2508 
2509 
2510 }
2511 
2512 /**
2513 * Test tonemap mode with specified request settings
2514 *
2515 * @param numFramesVerified Number of results to be verified
2516 * @param requestBuilder the request builder of settings to be tested
2517 */
2518 private void testToneMapMode(int numFramesVerified,
2519 CaptureRequest.Builder requestBuilder) throws Exception {
2520 final int MIN_TONEMAP_CURVE_POINTS = 2;
2521 final Float ZERO = new Float(0);
2522 final Float ONE = new Float(1.0f);
2523 
2524 SimpleCaptureCallback listener = new SimpleCaptureCallback();
2525 int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
2526 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2527 startPreview(requestBuilder, maxPreviewSz, listener);
2528 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2529 
2530 int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
2531 for (int i = 0; i < numFramesVerified; i++) {
2532 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2533 mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
2534 result.get(CaptureResult.TONEMAP_MODE));
2535 TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
2536 int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
2537 float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
2538 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
2539 float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
2540 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
2541 float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
2542 tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
2543 tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
2544 tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
2545 if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
2546 /**
2547 * TODO: need to figure out a good way to measure the difference
2548 * between request and result, as they may have different array
2549 * sizes.
2550 */
2551 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
2552 mCollector.expectEquals("Capture result gamma value should match request",
2553 requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
2554 result.get(CaptureResult.TONEMAP_GAMMA));
2555 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
2556 mCollector.expectEquals("Capture result preset curve should match request",
2557 requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
2558 result.get(CaptureResult.TONEMAP_PRESET_CURVE));
2559 }
2560 
2561 // Tonemap curve result availability and basic validity check for all modes.
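// Each channel curve is returned as interleaved (Pin, Pout) pairs, so a curve with N points
// yields an array of length N * TonemapCurve.POINT_SIZE (= 2 * N); e.g. the linear curve
// {0, 0, 1.0, 1.0} has 2 points and length 4. Hence the length bounds of
// MIN_TONEMAP_CURVE_POINTS and maxCurvePoints * 2 used below (a note on the checks, not an
// extra requirement).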
2562 mCollector.expectValuesInRange("Tonemap curve red values are out of range", 2563 CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE); 2564 mCollector.expectInRange("Tonemap curve red length is out of range", 2565 mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2566 mCollector.expectValuesInRange("Tonemap curve green values are out of range", 2567 CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE); 2568 mCollector.expectInRange("Tonemap curve green length is out of range", 2569 mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2570 mCollector.expectValuesInRange("Tonemap curve blue values are out of range", 2571 CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE); 2572 mCollector.expectInRange("Tonemap curve blue length is out of range", 2573 mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2574 2575 // Make sure capture result tonemap has identical channels. 2576 if (mStaticInfo.isMonochromeCamera()) { 2577 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2578 "have same dimension for all channels", mapRed.length, mapGreen.length); 2579 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2580 "have same dimension for all channels", mapRed.length, mapBlue.length); 2581 2582 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) { 2583 boolean isIdentical = true; 2584 for (int j = 0; j < mapRed.length; j++) { 2585 isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]); 2586 if (!isIdentical) 2587 break; 2588 } 2589 mCollector.expectTrue("Capture result tonemap of monochrome camera should " + 2590 "be identical between all channels", isIdentical); 2591 } 2592 } 2593 } 2594 stopPreview(); 2595 } 2596 2597 /** 2598 * Test awb mode control. 2599 * <p> 2600 * Test each supported AWB mode, verify the AWB mode in capture result 2601 * matches request. When AWB is locked, the color correction gains and 2602 * transform should remain unchanged. 2603 * </p> 2604 */ 2605 private void awbModeAndLockTestByCamera() throws Exception { 2606 int[] awbModes = mStaticInfo.getAwbAvailableModesChecked(); 2607 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2608 boolean canSetAwbLock = mStaticInfo.isAwbLockSupported(); 2609 CaptureRequest.Builder requestBuilder = 2610 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2611 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2612 2613 for (int mode : awbModes) { 2614 SimpleCaptureCallback listener; 2615 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode); 2616 listener = new SimpleCaptureCallback(); 2617 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2618 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2619 2620 // Verify AWB mode in capture result. 2621 verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener, 2622 NUM_FRAMES_VERIFIED); 2623 2624 if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) { 2625 // Verify color correction transform and gains stay unchanged after a lock. 
2626 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true); 2627 listener = new SimpleCaptureCallback(); 2628 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2629 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2630 2631 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) { 2632 waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE, 2633 CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT); 2634 } 2635 2636 } 2637 // Don't verify auto mode result if AWB lock is not supported 2638 if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) { 2639 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED); 2640 } 2641 } 2642 } 2643 2644 private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener, 2645 int numFramesVerified) { 2646 // Skip check if cc gains/transform/mode are not available 2647 if (!mStaticInfo.areKeysAvailable( 2648 CaptureResult.COLOR_CORRECTION_GAINS, 2649 CaptureResult.COLOR_CORRECTION_TRANSFORM, 2650 CaptureResult.COLOR_CORRECTION_MODE)) { 2651 return; 2652 } 2653 2654 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2655 RggbChannelVector lockedGains = 2656 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2657 ColorSpaceTransform lockedTransform = 2658 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2659 2660 for (int i = 0; i < numFramesVerified; i++) { 2661 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2662 // Color correction mode check is skipped here, as it is checked in colorCorrectionTest. 2663 validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 2664 2665 RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2666 ColorSpaceTransform transform = 2667 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2668 mCollector.expectEquals("Color correction gains should remain unchanged after awb lock", 2669 lockedGains, gains); 2670 mCollector.expectEquals("Color correction transform should remain unchanged after" 2671 + " awb lock", lockedTransform, transform); 2672 } 2673 } 2674 2675 /** 2676 * Test AF mode control. 2677 * <p> 2678 * Test all supported AF modes, verify the AF mode in capture result matches 2679 * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode, 2680 * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED 2681 * state within certain amount of frames. 2682 * </p> 2683 */ 2684 private void afModeTestByCamera() throws Exception { 2685 int[] afModes = mStaticInfo.getAfAvailableModesChecked(); 2686 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2687 CaptureRequest.Builder requestBuilder = 2688 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2689 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2690 2691 for (int mode : afModes) { 2692 SimpleCaptureCallback listener; 2693 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode); 2694 listener = new SimpleCaptureCallback(); 2695 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2696 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2697 2698 // Verify AF mode in capture result. 2699 verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener, 2700 NUM_FRAMES_VERIFIED); 2701 2702 // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes. 
2703 // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily 2704 // result in a passive AF call if the camera has already been focused, and the scene has 2705 // not changed enough to trigger an AF pass. Skip this constraint for LEGACY. 2706 if (mStaticInfo.isHardwareLevelAtLeastLimited() && 2707 (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE || 2708 mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { 2709 List<Integer> afStateList = new ArrayList<Integer>(); 2710 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED); 2711 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED); 2712 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList, 2713 NUM_RESULTS_WAIT_TIMEOUT); 2714 } 2715 } 2716 } 2717 2718 /** 2719 * Test video and optical stabilizations if they are supported by a given camera. 2720 */ 2721 private void stabilizationTestByCamera() throws Exception { 2722 // video stabilization test. 2723 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys(); 2724 2725 Integer[] videoStabModes = (keys.contains(CameraCharacteristics. 2726 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ? 2727 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) : 2728 new Integer[0]; 2729 int[] opticalStabModes = (keys.contains( 2730 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ? 2731 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0]; 2732 2733 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2734 CaptureRequest.Builder requestBuilder = 2735 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2736 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2737 startPreview(requestBuilder, maxPreviewSize, listener); 2738 2739 for (Integer mode : videoStabModes) { 2740 listener = new SimpleCaptureCallback(); 2741 requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode); 2742 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2743 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2744 // Video stabilization could return any modes. 2745 verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE, 2746 videoStabModes, listener, NUM_FRAMES_VERIFIED); 2747 } 2748 2749 for (int mode : opticalStabModes) { 2750 listener = new SimpleCaptureCallback(); 2751 requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode); 2752 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2753 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2754 verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode, 2755 listener, NUM_FRAMES_VERIFIED); 2756 } 2757 2758 stopPreview(); 2759 } 2760 2761 private void digitalZoomTestByCamera(Size previewSize, boolean repeating) throws Exception { 2762 final PointF[] TEST_ZOOM_CENTERS; 2763 final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked(); 2764 final float ZOOM_ERROR_MARGIN = 0.01f; 2765 if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) { 2766 // It doesn't make much sense to test the zoom if the device effectively supports 2767 // no zoom. 2768 return; 2769 } 2770 2771 final int croppingType = mStaticInfo.getScalerCroppingTypeChecked(); 2772 if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) { 2773 // Set the four corners in a way that the minimally allowed zoom factor is 2x. 
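// A crop centered at (0.25, 0.25) of the active array with 2x zoom spans exactly half of
// the array in each dimension, so it just fits inside the array; any smaller zoom factor
// would push the crop past the array boundary. The same holds for the other corners.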
2774 float normalizedLeft = 0.25f; 2775 float normalizedTop = 0.25f; 2776 float normalizedRight = 0.75f; 2777 float normalizedBottom = 0.75f; 2778 // If the max supported zoom is too small, make sure we at least test the max 2779 // Zoom is tested for the four corners. 2780 if (maxZoom < 2.0f) { 2781 normalizedLeft = 0.5f / maxZoom; 2782 normalizedTop = 0.5f / maxZoom; 2783 normalizedRight = 1.0f - normalizedLeft; 2784 normalizedBottom = 1.0f - normalizedTop; 2785 } 2786 TEST_ZOOM_CENTERS = new PointF[] { 2787 new PointF(0.5f, 0.5f), // Center point 2788 new PointF(normalizedLeft, normalizedTop), // top left corner zoom 2789 new PointF(normalizedRight, normalizedTop), // top right corner zoom 2790 new PointF(normalizedLeft, normalizedBottom), // bottom left corner zoom 2791 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom 2792 }; 2793 2794 if (VERBOSE) { 2795 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM"); 2796 } 2797 } else { 2798 // CENTER_ONLY 2799 TEST_ZOOM_CENTERS = new PointF[] { 2800 new PointF(0.5f, 0.5f), // Center point 2801 }; 2802 2803 if (VERBOSE) { 2804 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY"); 2805 } 2806 } 2807 2808 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 2809 final Rect defaultCropRegion = new Rect(0, 0, 2810 activeArraySize.width(), activeArraySize.height()); 2811 Rect[] cropRegions = new Rect[ZOOM_STEPS]; 2812 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 2813 CaptureRequest.Builder requestBuilder = 2814 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2815 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2816 2817 updatePreviewSurface(previewSize); 2818 configurePreviewOutput(requestBuilder); 2819 2820 CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS]; 2821 2822 // Set algorithm regions 2823 final int METERING_RECT_RATIO = 10; 2824 final MeteringRectangle[][] defaultMeteringRects = new MeteringRectangle[][] { 2825 { 2826 new MeteringRectangle ( 2827 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 2828 /*meteringWeight*/1), /* full active region */ 2829 }, 2830 { 2831 new MeteringRectangle ( 2832 /*x*/0, /*y*/0, activeArraySize.width()/METERING_RECT_RATIO, 2833 activeArraySize.height()/METERING_RECT_RATIO, 2834 /*meteringWeight*/1), 2835 }, 2836 { 2837 new MeteringRectangle ( 2838 /*x*/(int)(activeArraySize.width() * (0.5f - 0.5f/METERING_RECT_RATIO)), 2839 /*y*/(int)(activeArraySize.height() * (0.5f - 0.5f/METERING_RECT_RATIO)), 2840 activeArraySize.width()/METERING_RECT_RATIO, 2841 activeArraySize.height()/METERING_RECT_RATIO, 2842 /*meteringWeight*/1), 2843 }, 2844 }; 2845 2846 final int CAPTURE_SUBMIT_REPEAT; 2847 final int NUM_RESULTS_TO_SKIP; 2848 { 2849 int maxLatency = mStaticInfo.getSyncMaxLatency(); 2850 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 2851 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 2852 } else { 2853 CAPTURE_SUBMIT_REPEAT = maxLatency + 1; 2854 } 2855 if (repeating) { 2856 NUM_RESULTS_TO_SKIP = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 2857 } else { 2858 NUM_RESULTS_TO_SKIP = CAPTURE_SUBMIT_REPEAT - 1; 2859 } 2860 } 2861 2862 if (VERBOSE) { 2863 Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT); 2864 } 2865 2866 for (MeteringRectangle[] meteringRect : defaultMeteringRects) { 2867 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2868 update3aRegion(requestBuilder, algo, meteringRect, mStaticInfo); 2869 } 2870 
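// For each test center, step the zoom factor from 1x toward maxZoom, submit the
// corresponding crop region, and verify that the resulting crop only shrinks (or stays
// the same) and that the returned 3A regions are clipped to the reported crop region.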
2871 for (PointF center : TEST_ZOOM_CENTERS) { 2872 Rect previousCrop = null; 2873 2874 for (int i = 0; i < ZOOM_STEPS; i++) { 2875 /* 2876 * Submit capture request 2877 */ 2878 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS); 2879 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, 2880 maxZoom, defaultCropRegion); 2881 if (VERBOSE) { 2882 Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " + 2883 center + " The cropRegion is " + cropRegions[i] + 2884 " Preview size is " + previewSize + ", repeating is " + repeating); 2885 } 2886 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]); 2887 requests[i] = requestBuilder.build(); 2888 if (VERBOSE) { 2889 Log.v(TAG, "submit crop region " + cropRegions[i]); 2890 } 2891 if (repeating) { 2892 mSession.setRepeatingRequest(requests[i], listener, mHandler); 2893 // Drop first few frames 2894 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); 2895 // Interleave a regular capture 2896 mSession.capture(requests[0], listener, mHandler); 2897 } else { 2898 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) { 2899 mSession.capture(requests[i], listener, mHandler); 2900 } 2901 } 2902 2903 /* 2904 * Validate capture result 2905 */ 2906 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); // Drop first few frames 2907 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 2908 requests[i], NUM_RESULTS_WAIT_TIMEOUT); 2909 List<CaptureResult> partialResults = result.getPartialResults(); 2910 2911 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 2912 for (CaptureResult partialResult : partialResults) { 2913 Rect cropRegionInPartial = 2914 partialResult.get(CaptureResult.SCALER_CROP_REGION); 2915 if (cropRegionInPartial != null) { 2916 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 2917 + "match in final result", cropRegionInPartial, cropRegion); 2918 } 2919 } 2920 2921 if (CameraTestUtils.isStabilizationOff(requests[i])) { 2922 /* 2923 * Validate resulting crop regions 2924 */ 2925 if (previousCrop != null) { 2926 Rect currentCrop = cropRegion; 2927 mCollector.expectTrue(String.format( 2928 "Crop region should shrink or stay the same " 2929 + "(previous = %s, current = %s)", 2930 previousCrop, currentCrop), 2931 previousCrop.equals(currentCrop) 2932 || (previousCrop.width() > currentCrop.width() 2933 && previousCrop.height() > currentCrop.height())); 2934 } 2935 2936 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 2937 mCollector.expectRectsAreSimilar( 2938 "Request and result crop region should be similar", 2939 cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA); 2940 } 2941 2942 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) { 2943 mCollector.expectRectCentered( 2944 "Result crop region should be centered inside the active array", 2945 new Size(activeArraySize.width(), activeArraySize.height()), 2946 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED); 2947 } 2948 2949 /* 2950 * Validate resulting metering regions 2951 */ 2952 2953 // Use the actual reported crop region to calculate the resulting 2954 // metering region 2955 expectRegions[i] = getExpectedOutputRegion( 2956 /*requestRegion*/meteringRect, 2957 /*cropRect*/ cropRegion); 2958 2959 // Verify Output 3A region is intersection of input 3A region and 2960 // crop region 2961 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2962 validate3aRegion(result, partialResults, algo, expectRegions[i], 2963 false/*scaleByZoomRatio*/, mStaticInfo); 2964 } 2965 } 2966 2967 
previousCrop = cropRegion; 2968 } 2969 2970 if (maxZoom > 1.0f) { 2971 mCollector.expectTrue( 2972 String.format("Most zoomed-in crop region should be smaller " + 2973 "than active array w/h" + 2974 "(last crop = %s, active array = %s)", 2975 previousCrop, activeArraySize), 2976 (previousCrop.width() < activeArraySize.width() && 2977 previousCrop.height() < activeArraySize.height())); 2978 } 2979 } 2980 } 2981 } 2982 2983 private void zoomRatioTestByCamera(Size previewSize) throws Exception { 2984 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 2985 // The error margin is derive from a VGA size camera zoomed all the way to 10x, in which 2986 // case the cropping error can be as large as 480/46 - 480/48 = 0.435. 2987 final float ZOOM_ERROR_MARGIN = 0.05f; 2988 2989 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 2990 final Rect defaultCropRegion = 2991 new Rect(0, 0, activeArraySize.width(), activeArraySize.height()); 2992 final Rect zoom2xCropRegion = 2993 new Rect(activeArraySize.width()/4, activeArraySize.height()/4, 2994 activeArraySize.width()*3/4, activeArraySize.height()*3/4); 2995 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 2996 CaptureRequest.Builder requestBuilder = 2997 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2998 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 2999 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3000 3001 updatePreviewSurface(previewSize); 3002 configurePreviewOutput(requestBuilder); 3003 3004 // Set algorithm regions to full active region 3005 final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] { 3006 new MeteringRectangle ( 3007 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 3008 /*meteringWeight*/1) 3009 }; 3010 3011 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3012 update3aRegion(requestBuilder, algo, defaultMeteringRect, mStaticInfo); 3013 } 3014 3015 final int captureSubmitRepeat; 3016 { 3017 int maxLatency = mStaticInfo.getSyncMaxLatency(); 3018 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 3019 captureSubmitRepeat = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3020 } else { 3021 captureSubmitRepeat = maxLatency + 1; 3022 } 3023 } 3024 3025 float previousRatio = zoomRatioRange.getLower(); 3026 for (int i = 0; i < ZOOM_STEPS; i++) { 3027 /* 3028 * Submit capture request 3029 */ 3030 float zoomFactor = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() - 3031 zoomRatioRange.getLower()) * i / ZOOM_STEPS; 3032 if (VERBOSE) { 3033 Log.v(TAG, "Testing Zoom ratio " + zoomFactor + " Preview size is " + previewSize); 3034 } 3035 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3036 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 3037 CaptureRequest request = requestBuilder.build(); 3038 for (int j = 0; j < captureSubmitRepeat; ++j) { 3039 mSession.capture(request, listener, mHandler); 3040 } 3041 3042 /* 3043 * Validate capture result 3044 */ 3045 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3046 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 3047 request, NUM_RESULTS_WAIT_TIMEOUT); 3048 List<CaptureResult> partialResults = result.getPartialResults(); 3049 float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3050 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 3051 3052 for (CaptureResult partialResult : 
partialResults) {
3053 Rect cropRegionInPartial =
3054 partialResult.get(CaptureResult.SCALER_CROP_REGION);
3055 if (cropRegionInPartial != null) {
3056 mCollector.expectEquals("SCALER_CROP_REGION in partial result must "
3057 + "match in final result", cropRegionInPartial, cropRegion);
3058 }
3059
3060 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
3061 if (zoomRatioInPartial != null) {
3062 mCollector.expectEquals("CONTROL_ZOOM_RATIO in partial result must match"
3063 + " that in final result", resultZoomRatio, zoomRatioInPartial);
3064 }
3065 }
3066
3067 /*
3068 * Validate resulting crop regions and zoom ratio
3069 */
3070 mCollector.expectTrue(String.format(
3071 "Zoom ratio should increase or stay the same " +
3072 "(previous = %f, current = %f)",
3073 previousRatio, resultZoomRatio),
3074 Math.abs(previousRatio - resultZoomRatio) < ZOOM_ERROR_MARGIN ||
3075 (previousRatio < resultZoomRatio));
3076
3077 if (CameraTestUtils.isStabilizationOff(request)) {
3078 mCollector.expectTrue(String.format(
3079 "Request and result zoom ratio should be similar "
3080 + "(requested = %f, result = %f)", zoomFactor, resultZoomRatio),
3081 Math.abs(zoomFactor - resultZoomRatio) / zoomFactor <= ZOOM_ERROR_MARGIN);
3082
3083 // In case the zoom ratio is converted to a crop region in the HAL, scale the error
3084 // threshold for the crop region check to account for the error magnification when
3085 // converting to the post-zoom crop region.
3086 float errorMultiplier = Math.max(1.0f, zoomFactor);
3087 if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
3088 mCollector.expectRectsAreSimilar(
3089 "Request and result crop region should be similar",
3090 defaultCropRegion, cropRegion,
3091 CROP_REGION_ERROR_PERCENT_DELTA * errorMultiplier);
3092 }
3093
3094 mCollector.expectRectCentered(
3095 "Result crop region should be centered inside the active array",
3096 new Size(activeArraySize.width(), activeArraySize.height()),
3097 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED * errorMultiplier);
3098
3099 /*
3100 * Validate resulting metering regions
3101 */
3102 // Use the actual reported crop region to calculate the resulting metering region
3103 expectRegions[i] = getExpectedOutputRegion(
3104 /*requestRegion*/defaultMeteringRect,
3105 /*cropRect*/ cropRegion);
3106
3107 // Verify the output 3A region is the intersection of the input 3A region and crop region
3108 boolean scaleByZoomRatio = zoomFactor > 1.0f;
3109 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) {
3110 validate3aRegion(result, partialResults, algo, expectRegions[i],
3111 scaleByZoomRatio, mStaticInfo);
3112 }
3113 }
3114
3115 previousRatio = resultZoomRatio;
3116
3117 /*
3118 * Set a windowboxing cropRegion while zoomRatio is not 1.0x, and make sure the crop
3119 * region is overridden.
3120 */ 3121 if (zoomFactor != 1.0f) { 3122 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom2xCropRegion); 3123 CaptureRequest requestWithCrop = requestBuilder.build(); 3124 for (int j = 0; j < captureSubmitRepeat; ++j) { 3125 mSession.capture(requestWithCrop, listener, mHandler); 3126 } 3127 3128 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3129 CaptureResult resultWithCrop = listener.getCaptureResultForRequest( 3130 requestWithCrop, NUM_RESULTS_WAIT_TIMEOUT); 3131 float resultZoomRatioWithCrop = getValueNotNull(resultWithCrop, 3132 CaptureResult.CONTROL_ZOOM_RATIO); 3133 Rect cropRegionWithCrop = getValueNotNull(resultWithCrop, 3134 CaptureResult.SCALER_CROP_REGION); 3135 3136 mCollector.expectTrue(String.format( 3137 "Result zoom ratio should remain the same (activeArrayCrop: %f, " + 3138 "zoomedCrop: %f)", resultZoomRatio, resultZoomRatioWithCrop), 3139 Math.abs(resultZoomRatio - resultZoomRatioWithCrop) < ZOOM_ERROR_MARGIN); 3140 3141 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3142 mCollector.expectRectsAreSimilar( 3143 "Result crop region should remain the same with or without crop", 3144 cropRegion, cropRegionWithCrop, CROP_REGION_ERROR_PERCENT_DELTA); 3145 } 3146 } 3147 } 3148 } 3149 3150 private void zoomTimestampIncreaseTestByCamera() throws Exception { 3151 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3152 3153 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3154 updatePreviewSurface(maxPreviewSize); 3155 CaptureRequest.Builder requestBuilder = 3156 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3157 configurePreviewOutput(requestBuilder); 3158 3159 // Submit a sequence of requests first zooming in then zooming out. 3160 List<CaptureRequest> requests = new ArrayList<CaptureRequest>(); 3161 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3162 float zoomRange = zoomRatioRange.getUpper() - zoomRatioRange.getLower(); 3163 for (int i = 0; i <= ZOOM_STEPS; i++) { 3164 float zoomFactor = zoomRatioRange.getUpper() - (zoomRange * i / ZOOM_STEPS); 3165 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3166 // Add each ratio to both the beginning and end of the list. 3167 requests.add(requestBuilder.build()); 3168 requests.add(0, requestBuilder.build()); 3169 } 3170 int seqId = mSession.captureBurst(requests, listener, mHandler); 3171 3172 // onCaptureSequenceCompleted() trails all capture results. Upon its return, 3173 // we make sure we've received all results/errors. 
3174 listener.getCaptureSequenceLastFrameNumber( 3175 seqId, WAIT_FOR_RESULT_TIMEOUT_MS * ZOOM_STEPS); 3176 // Check timestamp monotonically increase for the whole sequence 3177 long prevTimestamp = 0; 3178 while (listener.hasMoreResults()) { 3179 TotalCaptureResult result = listener.getTotalCaptureResult( 3180 WAIT_FOR_RESULT_TIMEOUT_MS); 3181 long timestamp = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP); 3182 mCollector.expectGreater("Sensor timestamp must monotonically increase, " 3183 + "but changed from " + prevTimestamp + " to " + timestamp, 3184 prevTimestamp, timestamp); 3185 prevTimestamp = timestamp; 3186 } 3187 } 3188 3189 private void digitalZoomPreviewCombinationTestByCamera() throws Exception { 3190 final double ASPECT_RATIO_THRESHOLD = 0.001; 3191 List<Double> aspectRatiosTested = new ArrayList<Double>(); 3192 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3193 aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight()); 3194 3195 for (Size size : mOrderedPreviewSizes) { 3196 // Max preview size was already tested in testDigitalZoom test. skip it. 3197 if (size.equals(maxPreviewSize)) { 3198 continue; 3199 } 3200 3201 // Only test the largest size for each aspect ratio. 3202 double aspectRatio = (double)(size.getWidth()) / size.getHeight(); 3203 if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) { 3204 continue; 3205 } 3206 3207 if (VERBOSE) { 3208 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom"); 3209 } 3210 3211 aspectRatiosTested.add(aspectRatio); 3212 digitalZoomTestByCamera(size, /*repeating*/false); 3213 } 3214 } 3215 3216 private static boolean isAspectRatioContained(List<Double> aspectRatioList, 3217 double aspectRatio, double delta) { 3218 for (Double ratio : aspectRatioList) { 3219 if (Math.abs(ratio - aspectRatio) < delta) { 3220 return true; 3221 } 3222 } 3223 3224 return false; 3225 } 3226 3227 private void sceneModeTestByCamera() throws Exception { 3228 int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked(); 3229 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3230 CaptureRequest.Builder requestBuilder = 3231 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3232 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3233 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE); 3234 startPreview(requestBuilder, maxPreviewSize, listener); 3235 3236 for(int mode : sceneModes) { 3237 requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode); 3238 listener = new SimpleCaptureCallback(); 3239 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3240 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3241 3242 verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE, 3243 mode, listener, NUM_FRAMES_VERIFIED); 3244 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3245 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3246 CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED); 3247 } 3248 } 3249 3250 private void effectModeTestByCamera() throws Exception { 3251 int[] effectModes = mStaticInfo.getAvailableEffectModesChecked(); 3252 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3253 CaptureRequest.Builder requestBuilder = 3254 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3255 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 3256 SimpleCaptureCallback listener = 
new SimpleCaptureCallback(); 3257 startPreview(requestBuilder, maxPreviewSize, listener); 3258 3259 for(int mode : effectModes) { 3260 requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode); 3261 listener = new SimpleCaptureCallback(); 3262 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3263 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3264 3265 verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE, 3266 mode, listener, NUM_FRAMES_VERIFIED); 3267 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3268 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3269 CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED); 3270 } 3271 } 3272 3273 private void extendedSceneModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 3274 Capability[] extendedSceneModeCaps = mStaticInfo.getAvailableExtendedSceneModeCapsChecked(); 3275 if (extendedSceneModeCaps.length == 0) { 3276 return; 3277 } 3278 3279 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3280 CaptureRequest.Builder requestBuilder = 3281 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3282 3283 for (Capability cap : extendedSceneModeCaps) { 3284 int mode = cap.getMode(); 3285 requestBuilder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE, mode); 3286 3287 // Test that DISABLED and BOKEH_CONTINUOUS mode doesn't slow down the frame rate 3288 if (mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_DISABLED || 3289 mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) { 3290 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 3291 } 3292 3293 Range<Float> zoomRange = cap.getZoomRatioRange(); 3294 float[] zoomRatios = new float[]{zoomRange.getLower(), zoomRange.getUpper()}; 3295 for (float ratio : zoomRatios) { 3296 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3297 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, ratio); 3298 startPreview(requestBuilder, maxPreviewSize, listener); 3299 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3300 3301 verifyCaptureResultForKey(CaptureResult.CONTROL_EXTENDED_SCENE_MODE, 3302 mode, listener, NUM_FRAMES_VERIFIED); 3303 float zoomRatioDelta = ZOOM_RATIO_ERROR_PERCENT_DELTA * ratio; 3304 if (CameraTestUtils.isStabilizationOff(requestBuilder.build())) { 3305 verifyCaptureResultForKey(CaptureResult.CONTROL_ZOOM_RATIO, 3306 ratio, listener, NUM_FRAMES_VERIFIED, zoomRatioDelta); 3307 } 3308 } 3309 } 3310 } 3311 3312 private void manualFlashStrengthControlTestByCamera() throws Exception { 3313 Size maxPrevSize = mOrderedPreviewSizes.get(0); 3314 int singleMaxLevel = mStaticInfo.getCharacteristics().get( 3315 CameraCharacteristics.FLASH_SINGLE_STRENGTH_MAX_LEVEL); 3316 int torchMaxLevel = mStaticInfo.getCharacteristics().get( 3317 CameraCharacteristics.FLASH_TORCH_STRENGTH_MAX_LEVEL); 3318 int strengthLevel = singleMaxLevel - 1; 3319 3320 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 3321 CaptureRequest.Builder requestBuilder = 3322 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3323 3324 // Single mode 3325 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 3326 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 3327 requestBuilder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, strengthLevel); 3328 3329 CaptureRequest request; 3330 updatePreviewSurface(maxPrevSize); 3331 configurePreviewOutput(requestBuilder); 3332 
request = requestBuilder.build(); 3333 mSession.capture(request, resultListener, mHandler); 3334 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3335 CaptureResult result = 3336 resultListener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 3337 int resultStrengthLevel = getValueNotNull(result, CaptureResult.FLASH_STRENGTH_LEVEL); 3338 assertTrue(resultStrengthLevel == strengthLevel); 3339 assertTrue(resultStrengthLevel <= singleMaxLevel); 3340 3341 // Torch mode 3342 strengthLevel = torchMaxLevel - 1; 3343 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 3344 requestBuilder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, strengthLevel); 3345 CaptureRequest torchRequest = requestBuilder.build(); 3346 mSession.setRepeatingRequest(torchRequest, resultListener, mHandler); 3347 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3348 result = resultListener.getCaptureResultForRequest( 3349 torchRequest, NUM_RESULTS_WAIT_TIMEOUT); 3350 resultStrengthLevel = getValueNotNull(result, CaptureResult.FLASH_STRENGTH_LEVEL); 3351 assertTrue(resultStrengthLevel == strengthLevel); 3352 assertTrue(resultStrengthLevel <= torchMaxLevel); 3353 } 3354 3355 private void autoframingTestByCamera() throws Exception { 3356 // Verify autoframing state, zoom ratio and video stabilizations controls for autoframing 3357 // modes ON and OFF 3358 int[] autoframingModes = {CameraMetadata.CONTROL_AUTOFRAMING_OFF, 3359 CameraMetadata.CONTROL_AUTOFRAMING_ON}; 3360 final int zoomSteps = 5; 3361 final float zoomErrorMargin = 0.05f; 3362 final int kMaxNumFrames = 200; 3363 Size maxPreviewSize = mOrderedPreviewSizes.get(0); // Max preview size. 3364 CaptureRequest.Builder requestBuilder = 3365 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3366 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3367 startPreview(requestBuilder, maxPreviewSize, listener); 3368 3369 for (int mode : autoframingModes) { 3370 float expectedZoomRatio = 0.0f; 3371 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3372 for (int i = 0; i < zoomSteps; i++) { 3373 float testZoomRatio = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() 3374 - zoomRatioRange.getLower()) * i / zoomSteps; 3375 // Zoom ratio 1.0f is a special case. 
The ZoomRatioMapper in the framework maintains the
3376 // 1.0f ratio in the CaptureResult.
3377 if (testZoomRatio == 1.0f) {
3378 continue;
3379 }
3380 requestBuilder.set(CaptureRequest.CONTROL_AUTOFRAMING, mode);
3381 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, testZoomRatio);
3382 listener = new SimpleCaptureCallback();
3383 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
3384 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3385 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3386 Float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO);
3387 int autoframingState = getValueNotNull(result,
3388 CaptureResult.CONTROL_AUTOFRAMING_STATE);
3389 int videoStabilizationMode = getValueNotNull(result,
3390 CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
3391
3392 if (mode == CameraMetadata.CONTROL_AUTOFRAMING_ON) {
3393 int numFrames = 0;
3394 while (numFrames < kMaxNumFrames) {
3395 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3396 autoframingState = getValueNotNull(result,
3397 CaptureResult.CONTROL_AUTOFRAMING_STATE);
3398 assertTrue("Autoframing state should be FRAMING or CONVERGED when "
3399 + "AUTOFRAMING is ON",
3400 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING
3401 || autoframingState
3402 == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED);
3403
3404 assertTrue("Video Stabilization should be OFF when AUTOFRAMING is ON",
3405 videoStabilizationMode
3406 == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
3407
3408 resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO);
3409 if (autoframingState ==
3410 CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED) {
3411 break;
3412 }
3413 numFrames++;
3414 }
3415
3416 if (autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED
3417 && expectedZoomRatio == 0.0f) {
3418 expectedZoomRatio = resultZoomRatio;
3419 }
3420 } else {
3421 expectedZoomRatio = testZoomRatio;
3422 assertTrue("Autoframing state should be INACTIVE when AUTOFRAMING is OFF",
3423 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_INACTIVE);
3424 }
3425
3426 verifyCaptureResultForKey(CaptureResult.CONTROL_AUTOFRAMING, mode, listener,
3427 NUM_FRAMES_VERIFIED);
3428
3429 // If autoframing was OFF, or the framing state CONVERGED, the zoom ratio in the result
3430 // should be within the margin of error.
3431 if (autoframingState != CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING) {
3432 mCollector.expectTrue(String.format(
3433 "Zoom Ratio in Capture Request does not match the expected zoom "
3434 + "ratio in Capture Result (expected = %f, actual = %f)",
3435 expectedZoomRatio, resultZoomRatio),
3436 Math.abs(expectedZoomRatio - resultZoomRatio) / expectedZoomRatio
3437 <= zoomErrorMargin);
3438 }
3439 }
3440 }
3441 }
3442
3443 private void settingsOverrideTestByCamera() throws Exception {
3444 // Verify that settings override is OFF by default
3445 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
3446 CaptureRequest.Builder requestBuilder =
3447 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
3448 SimpleCaptureCallback listener = new SimpleCaptureCallback();
3449 startPreview(requestBuilder, maxPreviewSize, listener);
3450 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3451 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE,
3452 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_OFF, listener, NUM_FRAMES_VERIFIED);
3453
3454 // Turn settings override to ZOOM, and make sure it's reflected in the result
3455 requestBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE,
3456 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM);
3457 SimpleCaptureCallback listenerZoom = new SimpleCaptureCallback();
3458 mSession.setRepeatingRequest(requestBuilder.build(), listenerZoom, mHandler);
3459 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3460 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE,
3461 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED);
3462
3463 // Verify that the settings override result is ZOOM if it is turned on from the beginning
3464 listenerZoom = new SimpleCaptureCallback();
3465 stopPreviewAndDrain();
3466 startPreview(requestBuilder, maxPreviewSize, listenerZoom);
3467 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3468 // Wait an additional 2 frames to allow for non-overridden
3469 // results during startup.
3470 final int ZOOM_SOME_FRAMES = 2; 3471 waitForNumResults(listenerZoom, ZOOM_SOME_FRAMES); 3472 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3473 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED); 3474 } 3475 3476 private void testAeModeOnLowLightBoostBrightnessPriorityTestByCamera() throws Exception { 3477 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3478 CaptureRequest.Builder requestBuilder = 3479 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3480 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, 3481 CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 3482 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3483 startPreview(requestBuilder, maxPreviewSize, listener); 3484 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3485 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3486 // Expect that AE_MODE is ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY 3487 int resultAeMode = getValueNotNull(result, CaptureResult.CONTROL_AE_MODE); 3488 assertTrue("AE Mode should be ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY", resultAeMode 3489 == CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 3490 3491 // Expect that CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE is present 3492 int resultLowLightBoostState = 3493 getValueNotNull(result, CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE); 3494 assertTrue("Low Light Boost State should be ACTIVE or INACTIVE", 3495 resultLowLightBoostState == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_INACTIVE 3496 || resultLowLightBoostState == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_ACTIVE); 3497 } 3498 3499 //---------------------------------------------------------------- 3500 //---------Below are common functions for all tests.-------------- 3501 //---------------------------------------------------------------- 3502 3503 /** 3504 * Enable exposure manual control and change exposure and sensitivity and 3505 * clamp the value into the supported range. 3506 */ 3507 private void changeExposure(CaptureRequest.Builder requestBuilder, 3508 long expTime, int sensitivity) { 3509 // Check if the max analog sensitivity is available and no larger than max sensitivity. The 3510 // max analog sensitivity is not actually used here. This is only an extra correctness 3511 // check. 3512 mStaticInfo.getMaxAnalogSensitivityChecked(); 3513 3514 expTime = mStaticInfo.getExposureClampToRange(expTime); 3515 sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity); 3516 3517 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 3518 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime); 3519 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity); 3520 } 3521 /** 3522 * Enable exposure manual control and change exposure time and 3523 * clamp the value into the supported range. 3524 * 3525 * <p>The sensitivity is set to default value.</p> 3526 */ 3527 private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) { 3528 changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY); 3529 } 3530 3531 /** 3532 * Get the exposure time array that contains multiple exposure time steps in 3533 * the exposure time range, in nanoseconds. 
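*
* <p>For example, with {@value #DEFAULT_NUM_EXPOSURE_TIME_STEPS} steps the returned array
* has four entries: the minimum, one-third, two-thirds, and the maximum of the supported
* exposure time range, in ascending order.</p>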
3534 */
3535 private long[] getExposureTimeTestValuesSorted() {
3536 long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1];
3537 long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS);
3538 long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS);
3539
3540 long range = maxExpTime - minExpTime;
3541 double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS;
3542 for (int i = 0; i < testValues.length; i++) {
3543 // Step down from the maximum so that the final array is sorted in ascending order.
3544 testValues[i] = maxExpTime - (long) (stepSize * (testValues.length - 1 - i));
3545 testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]);
3546 }
3547
3548 return testValues;
3549 }
3550
3551 /**
3552 * Generate test focus distances in the range [0, minFocusDistance], in increasing order.
3553 *
3554 * @param repeatMin number of times minValue will be repeated.
3555 * @param repeatMax number of times maxValue will be repeated.
3556 */
3557 private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) {
3558 int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax;
3559 float[] testValues = new float[totalCount];
3560 float minValue = 0;
3561 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
3562
3563 float range = maxValue - minValue;
3564 float stepSize = range / NUM_TEST_FOCUS_DISTANCES;
3565
3566 for (int i = 0; i < repeatMin; i++) {
3567 testValues[i] = minValue;
3568 }
3569 for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) {
3570 testValues[repeatMin+i] = minValue + stepSize * i;
3571 }
3572 for (int i = 0; i < repeatMax; i++) {
3573 testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] =
3574 maxValue;
3575 }
3576
3577 return testValues;
3578 }
3579
3580 /**
3581 * Get the sensitivity array that contains multiple sensitivity steps in the
3582 * sensitivity range.
3583 * <p>
3584 * The number of sensitivity test values is determined by
3585 * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and the sensitivity range, and is
3586 * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}.
3587 * </p>
3588 */
3589 private int[] getSensitivityTestValuesSorted() {
3590 int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
3591 DEFAULT_SENSITIVITY);
3592 int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
3593 DEFAULT_SENSITIVITY);
3594
3595 int range = maxSensitivity - minSensitivity;
3596 int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
3597 int numSteps = range / stepSize;
3598 // Bound the number of test steps to avoid an overly long test.
3599 if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
3600 numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
3601 stepSize = range / numSteps;
3602 }
3603 int[] testValues = new int[numSteps + 1];
3604 for (int i = 0; i < testValues.length; i++) {
3605 // Step down from the maximum so that the final array is sorted in ascending order.
3606 testValues[i] = maxSensitivity - stepSize * (testValues.length - 1 - i);
3607 testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
3608 }
3609
3610 return testValues;
3611 }
3612
3613 /**
3614 * Validate the AE manual control exposure time.
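*
* <p>For example, with a 30 ms request the 3% error margin accepts any result in
* (29.1 ms, 30 ms]; anything longer than the request fails because results may only
* round down.</p>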
3615 *
3616 * <p>The result exposure time should be close to the request and may only round down.</p>
3617 *
3618 * @param request Request exposure time
3619 * @param result Result exposure time
3620 */
3621 private void validateExposureTime(long request, long result) {
3622 long expTimeDelta = request - result;
3623 long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
3624 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
3625 // First, the result must round down, not up; second, it must be close to the request.
3626 mCollector.expectTrue("Exposure time is invalid for AE manual control test, request: "
3627 + request + " result: " + result,
3628 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
3629 }
3630
3631 /**
3632 * Validate AE manual control sensitivity.
3633 *
3634 * @param request Request sensitivity
3635 * @param result Result sensitivity
3636 */
3637 private void validateSensitivity(int request, int result) {
3638 float sensitivityDelta = request - result;
3639 float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
3640 // First, the result must round down, not up; second, it must be close to the request.
3641 mCollector.expectTrue("Sensitivity is invalid for AE manual control test, request: "
3642 + request + " result: " + result,
3643 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
3644 }
3645
3646 /**
3647 * Validate frame duration for a given capture.
3648 *
3649 * <p>The frame duration should be at least as long as the exposure time.</p>
3650 *
3651 * @param result The capture result for a given capture
3652 */
3653 private void validateFrameDurationForCapture(CaptureResult result) {
3654 long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
3655 long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
3656 if (VERBOSE) {
3657 Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
3658 }
3659
3660 mCollector.expectTrue(String.format("Frame duration (%d) should be no shorter than exposure"
3661 + " time (%d) for a given capture", frameDuration, expTime),
3662 frameDuration >= expTime);
3663
3664 validatePipelineDepth(result);
3665 }
3666
3667 /**
3668 * Basic verification for the control mode capture result.
3669 *
3670 * @param key The capture result key to be verified against
3671 * @param requestMode The request mode for this result
3672 * @param listener The capture listener to get capture results
3673 * @param numFramesVerified The number of capture results to be verified
3674 * @param threshold The threshold by which the request and result keys can differ
3675 */
3676 private void verifyCaptureResultForKey(CaptureResult.Key<Float> key, float requestMode,
3677 SimpleCaptureCallback listener, int numFramesVerified, float threshold) {
3678 for (int i = 0; i < numFramesVerified; i++) {
3679 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3680 validatePipelineDepth(result);
3681 float resultMode = getValueNotNull(result, key);
3682 if (VERBOSE) {
3683 Log.v(TAG, "Expect value: " + requestMode + " result value: "
3684 + resultMode + " threshold " + threshold);
3685 }
3686 // Check that the request and result are within the given threshold of each other.
3687 // (expectEquals isn't the most intuitive function name.)
3688 mCollector.expectEquals("Key " + key.getName() + " request: " + requestMode +
3689 " result: " + resultMode + " not within threshold " + threshold +
3690 " of each other", requestMode, resultMode, threshold);
3691 }
3692 }
3693
3694 /**
3695 * Basic verification for the control mode capture result.
3696 *
3697 * @param key The capture result key to be verified against
3698 * @param requestMode The request mode for this result
3699 * @param listener The capture listener to get capture results
3700 * @param numFramesVerified The number of capture results to be verified
3701 */
3702 private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode,
3703 SimpleCaptureCallback listener, int numFramesVerified) {
3704 for (int i = 0; i < numFramesVerified; i++) {
3705 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3706 validatePipelineDepth(result);
3707 T resultMode = getValueNotNull(result, key);
3708 if (VERBOSE) {
3709 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: "
3710 + resultMode.toString());
3711 }
3712 mCollector.expectEquals("Key " + key.getName() + " result should match request",
3713 requestMode, resultMode);
3714 }
3715 }
3716
3717 /**
3718 * Basic verification that the value of a capture result key is one of the expected
3719 * values.
3720 *
3721 * @param key The capture result key to be verified against
3722 * @param expectedModes The list of any possible expected modes for this result
3723 * @param listener The capture listener to get capture results
3724 * @param numFramesVerified The number of capture results to be verified
3725 */
3726 private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes,
3727 SimpleCaptureCallback listener, int numFramesVerified) {
3728 for (int i = 0; i < numFramesVerified; i++) {
3729 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3730 validatePipelineDepth(result);
3731 T resultMode = getValueNotNull(result, key);
3732 if (VERBOSE) {
3733 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: "
3734 + resultMode.toString());
3735 }
3736 // Capture result should be one of the expected values.
3737 mCollector.expectContains(expectedModes, resultMode);
3738 }
3739 }
3740
3741 /**
3742 * Verify that the fps does not slow down for a given input request with certain
3743 * controls inside.
3744 * <p>
3745 * This method selects a max preview size for each fps range and then
3746 * configures the preview stream. Preview is started with the max preview
3747 * size, and the test then verifies that the result frame duration is in the
3748 * expected frame duration range.
3749 * </p>
3750 *
3751 * @param requestBuilder The request builder that contains post-processing
3752 * controls that could impact the output frame rate, such as
3753 * {@link CaptureRequest#NOISE_REDUCTION_MODE}. The values of
3754 * these controls must be set such that the frame
3755 * rate is not slowed down.
* @param numFramesVerified The number of frames to be verified
3757 * @param fpsRanges The fps ranges to be verified
3758 */
3759 private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
3760 int numFramesVerified, List<Range<Integer>> fpsRanges) throws Exception {
3761 boolean frameDurationAvailable = true;
3762 // Allow a few frames for AE to settle on target FPS range
3763 final int NUM_FRAME_TO_SKIP = 6;
3764 float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
3765 if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
3766 frameDurationAvailable = false;
3767 // Allow a larger error margin (1.5%) for timestamps
3768 frameDurationErrorMargin = 0.015f;
3769 }
3770 if (mStaticInfo.isExternalCamera()) {
3771 // Allow an even larger error margin (15%) for external camera timestamps
3772 frameDurationErrorMargin = 0.15f;
3773 }
3774
3775 boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
3776 Range<Integer> fpsRange;
3777 SimpleCaptureCallback resultListener;
3778
3779 for (int i = 0; i < fpsRanges.size(); i += 1) {
3780 fpsRange = fpsRanges.get(i);
3781 Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
3782 // If unable to find a preview size, then log the failure, and skip this run.
3783 if (previewSz == null) {
3784 if (mStaticInfo.isCapabilitySupported(
3785 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
3786 mCollector.addMessage(String.format(
3787 "Unable to find a preview size supporting given fps range %s",
3788 fpsRange));
3789 }
3790 continue;
3791 }
3792
3793 if (VERBOSE) {
3794 Log.v(TAG, String.format("Test fps range %s for preview size %s",
3795 fpsRange, previewSz.toString()));
3796 }
3797 requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
3798 // Turn off auto antibanding to avoid exposure time and frame duration interference
3799 // from the antibanding algorithm.
3800 if (antiBandingOffIsSupported) {
3801 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
3802 CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
3803 } else {
3804 // The device doesn't implement the OFF mode, so the test continues. It needs to
3805 // make sure that the antibanding algorithm doesn't slow down the fps.
3806 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
3807 " not slow down the frame rate regardless of its current antibanding" +
3808 " mode");
3809 }
3810
3811 resultListener = new SimpleCaptureCallback();
3812 startPreview(requestBuilder, previewSz, resultListener);
3813 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
3814 // Wait several more frames for AE to settle on target FPS range
3815 waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);
3816
3817 long[] frameDurationRange = new long[]{
3818 (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
3819 long captureTime = 0, prevCaptureTime = 0;
3820 long frameDurationSum = 0;
3821 for (int j = 0; j < numFramesVerified; j++) {
3822 long frameDuration = frameDurationRange[0];
3823 CaptureResult result =
3824 resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
3825 validatePipelineDepth(result);
3826 if (frameDurationAvailable) {
3827 frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
3828 } else {
3829 // If the frame duration is not available, check timestamps instead.
3830 captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
3831 if (j > 0) {
3832 frameDuration = captureTime - prevCaptureTime;
3833 }
3834 prevCaptureTime = captureTime;
3835 }
3836 frameDurationSum += frameDuration;
3837 }
3838 long frameDurationAvg = frameDurationSum / numFramesVerified;
3839 mCollector.expectInRange(
3840 "Frame duration must be in the range of " +
3841 Arrays.toString(frameDurationRange),
3842 frameDurationAvg,
3843 (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
3844 (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
3845
3846 }
3847
3848 stopPreview();
3849 }
3850
3851 /**
3852 * Validate the pipeline depth result.
3853 *
3854 * @param result The capture result to get pipeline depth data
3855 */
3856 private void validatePipelineDepth(CaptureResult result) {
3857 final byte MIN_PIPELINE_DEPTH = 1;
3858 byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
3859 Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
3860 mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
3861 MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
3862 maxPipelineDepth);
3863 }
3864
3865 /**
3866 * Calculate the anti-flickering corrected exposure time.
3867 * <p>
3868 * If the input exposure time is very short (shorter than the flickering
3869 * boundary), which indicates a bright scene that is very likely outdoors, skip the
3870 * correction, as it provides little benefit.
3871 * </p>
3872 * <p>
3873 * For a long exposure time (longer than the flickering boundary), find the
3874 * multiple of the flickering boundary that is closest to the input exposure time.
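* For example, with 50Hz anti-banding (a 10ms boundary) a 23ms exposure is corrected
* to 20ms and a 26ms exposure to 30ms.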
3875 * </p> 3876 * 3877 * @param flickeringMode The flickering mode 3878 * @param exposureTime The input exposureTime to be corrected 3879 * @return anti-flickering corrected exposure time 3880 */ 3881 private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) { 3882 if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) { 3883 throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz"); 3884 } 3885 long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS; 3886 if (flickeringMode == ANTI_FLICKERING_60HZ) { 3887 flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS; 3888 } 3889 3890 if (exposureTime <= flickeringBoundary) { 3891 return exposureTime; 3892 } 3893 3894 // Find the closest anti-flickering corrected exposure time 3895 long correctedExpTime = exposureTime + (flickeringBoundary / 2); 3896 correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary); 3897 return correctedExpTime; 3898 } 3899 } 3900