/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.*;

import android.graphics.ImageFormat;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.OutputConfiguration;
import android.util.Log;
import android.util.Pair;
import android.util.Range;

import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;

import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * CameraDevice preview test by using SurfaceView.
 */
public class SurfaceViewPreviewTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "SurfaceViewPreviewTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int FRAME_TIMEOUT_MS = 1000;
    private static final int NUM_FRAMES_VERIFIED = 30;
    private static final int NUM_TEST_PATTERN_FRAMES_VERIFIED = 60;
    private static final float FRAME_DURATION_ERROR_MARGIN = 0.005f; // 0.5 percent error margin.
    private static final int PREPARE_TIMEOUT_MS = 10000; // 10 s

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test all supported preview sizes for each camera device.
     * <p>
     * For the first {@link #NUM_FRAMES_VERIFIED} capture results, the {@link CaptureCallback}
     * callback availability and the capture timestamp (monotonically increasing) ordering
     * are verified.
     * </p>
     */
    public void testCameraPreview() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                previewTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic test pattern mode preview.
     * <p>
     * Only the test pattern preview and capture results are checked; the image buffers are not
     * validated.
     * </p>
     */
    public void testBasicTestPatternPreview() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                previewTestPatternTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} for preview, validating the preview
     * frame duration and exposure time.
     */
    public void testPreviewFpsRange() throws Exception {
        for (String id : mCameraIds) {
            try {
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                previewFpsRangeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test surface set streaming use cases.
     *
     * <p>
     * The test sets output configurations with increasing surface set IDs for the preview and YUV
     * streams. The maximum supported preview size is selected for the preview stream, and the
     * maximum supported YUV size (depending on the supported hardware level) is selected for the
     * YUV stream. This test also exercises the prepare API.
     * </p>
     */
    public void testSurfaceSet() throws Exception {
        for (String id : mCameraIds) {
            try {
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                surfaceSetTestByCamera(id);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
     * expected effects on performance.
     *
     * - Ensure that prepare() results in onSurfacePrepared() being invoked
     * - Ensure that prepare() does not cause preview glitches while operating
     * - Ensure that starting to use a newly-prepared output does not cause additional
     *   preview glitches to occur
     */
    public void testPreparePerformance() throws Throwable {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                preparePerformanceTestByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void preparePerformanceTestByCamera(String cameraId) throws Exception {
        final int MAX_IMAGES_TO_PREPARE = 10;
        final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
        final int MAX_RESULTS_TO_WAIT = 10;
        final int FRAMES_FOR_AVERAGING = 100;
        final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
        final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference

        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        // Don't need image data, just drop it right away to minimize overhead
        ImageDropperListener imageListener = new ImageDropperListener();

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        // Configure outputs and session

        updatePreviewSurface(maxPreviewSize);

        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE,
                imageListener);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(mReaderSurface);

        CameraCaptureSession.StateCallback mockSessionListener =
                mock(CameraCaptureSession.StateCallback.class);

        mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);

        previewRequest.addTarget(mPreviewSurface);
        Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
        previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Converge AE
        waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);

        if (mStaticInfo.isAeLockSupported()) {
            // Lock AE if possible to improve stability
            previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
            mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                // Legacy mode doesn't output AE state
                waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
                        CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
            }
        }

        // Measure frame rate for a bit
        Pair<Long, Long> frameDurationStats =
                measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);

        Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
                frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));

        // Drain results, do prepare
        resultListener.drain();

        mSession.prepare(mReaderSurface);

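        // prepare() asks the camera device to pre-allocate buffers for the not-yet-used YUV
        // output; completion is reported asynchronously via onSurfacePrepared() on the session
        // state callback, which the mock listener below is expected to observe within the
        // timeout while the repeating preview request keeps running.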
        verify(mockSessionListener,
                timeout(PREPARE_TIMEOUT_MS).times(1)).
                onSurfacePrepared(eq(mSession), eq(mReaderSurface));

        // Calculate frame rate during prepare

        int resultsReceived = (int) resultListener.getTotalNumFrames();
        if (resultsReceived > 2) {
            // Only verify frame rate if there are a couple of results
            Pair<Long, Long> whilePreparingFrameDurationStats =
                    measureMeanFrameInterval(resultListener, resultsReceived, /*prevTimestamp*/ 0);

            Log.i(TAG, String.format("Frame interval during prepare avg: %f ms, peak %f ms",
                    whilePreparingFrameDurationStats.first / 1e6,
                    whilePreparingFrameDurationStats.second / 1e6));

            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                mCollector.expectTrue(
                        String.format("Camera %s: Preview peak frame interval affected by prepare " +
                                "call: preview avg frame duration: %f ms, peak during prepare: %f ms",
                                cameraId,
                                frameDurationStats.first / 1e6,
                                whilePreparingFrameDurationStats.second / 1e6),
                        (whilePreparingFrameDurationStats.second <=
                                frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
                mCollector.expectTrue(
                        String.format("Camera %s: Preview average frame interval affected by prepare " +
                                "call: preview avg frame duration: %f ms, during prepare: %f ms",
                                cameraId,
                                frameDurationStats.first / 1e6,
                                whilePreparingFrameDurationStats.first / 1e6),
                        (whilePreparingFrameDurationStats.first <=
                                frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
            }
        }

        resultListener.drain();

        // Get at least one more preview result without prepared target
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);

        // Now use the prepared stream and ensure there are no hiccups from using it
        previewRequest.addTarget(mReaderSurface);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        Pair<Long, Long> preparedFrameDurationStats =
                measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE * 2, prevTimestamp);

        Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
                preparedFrameDurationStats.first / 1e6,
                preparedFrameDurationStats.second / 1e6));

        if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
            mCollector.expectTrue(
                    String.format("Camera %s: Preview peak frame interval affected by use of new " +
                            "stream: preview avg frame duration: %f ms, peak with new stream: %f ms",
                            cameraId,
                            frameDurationStats.first / 1e6, preparedFrameDurationStats.second / 1e6),
                    (preparedFrameDurationStats.second <=
                            frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
            mCollector.expectTrue(
                    String.format("Camera %s: Preview average frame interval affected by use of new " +
                            "stream: preview avg frame duration: %f ms, with new stream: %f ms",
                            cameraId,
                            frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
                    (preparedFrameDurationStats.first <=
                            frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
        }
    }

    /**
     * Test to verify correct behavior with the same Surface object being used repeatedly with
     * different native internals, and multiple Surfaces pointing to the same actual consumer object.
     */
    public void testSurfaceEquality() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                surfaceEqualityTestByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void surfaceEqualityTestByCamera(String cameraId) throws Exception {
        final int SOME_FRAMES = 10;

        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        updatePreviewSurface(maxPreviewSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface1);

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface1);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);

        // Drain
        mSession.abortCaptures();
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Hide / unhide the SurfaceView to get a new target Surface
        recreatePreviewSurface();

        // And resize it again
        updatePreviewSurface(maxPreviewSize);

        // Create a second surface that targets the shared SurfaceTexture
        Surface sharedOutputSurface2 = new Surface(sharedOutputTexture);

        // Use the new Surfaces for a new session
        outputSurfaces.clear();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface2);

        sessionListener = new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);

        previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface2);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);
    }

    /**
     * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
     * provided capture listener. If prevTimestamp is positive, it is used for the first interval
     * calculation; otherwise, the first result is used to establish the starting time.
     *
     * Returns the mean interval in the first pair entry, and the largest interval in the second
     * pair entry.
     */
    Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
            long prevTimestamp) throws Exception {
        long summedIntervals = 0;
        long maxInterval = 0;
        int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);

        for (int i = 0; i < frameCount; i++) {
            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (prevTimestamp > 0) {
                long interval = timestamp - prevTimestamp;
                if (interval > maxInterval) maxInterval = interval;
                summedIntervals += interval;
            }
            prevTimestamp = timestamp;
        }
        return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
    }

    /**
     * Test preview fps range for all supported ranges. The exposure time and frame duration are
     * validated.
     */
    private void previewFpsRangeTestByCamera() throws Exception {
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
        Range<Integer>[] fpsRanges = mStaticInfo.getAeAvailableTargetFpsRangesChecked();
        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
        startPreview(requestBuilder, maxPreviewSz, resultListener);

        for (int i = 0; i < fpsRanges.length; i += 1) {
            fpsRange = fpsRanges[i];

            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from the antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode; the test continues, but it needs to
                // make sure that the antibanding algorithm doesn't interfere with the fps range
                // control.
                Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
                        " satisfy the specified fps range regardless of its current antibanding" +
                        " mode");
            }

            resultListener = new SimpleCaptureCallback();
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);

            verifyPreviewTargetFpsRange(resultListener, NUM_FRAMES_VERIFIED, fpsRange,
                    maxPreviewSz);
        }

        stopPreview();
    }

    private void verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener,
            int numFramesVerified, Range<Integer> fpsRange, Size previewSz) {
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        List<Integer> capabilities = mStaticInfo.getAvailableCapabilitiesChecked();

        if (capabilities.contains(CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
            long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
            long[] frameDurationRange = new long[]{
                    (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            mCollector.expectInRange(
                    "Frame duration must be in the range of " + Arrays.toString(frameDurationRange),
                    frameDuration,
                    (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)),
                    (long) (frameDurationRange[1] * (1 + FRAME_DURATION_ERROR_MARGIN)));
            long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
            mCollector.expectTrue(String.format("Exposure time %d must be no larger than frame"
                    + " duration %d", expTime, frameDuration), expTime <= frameDuration);

            Long minFrameDuration = mMinPreviewFrameDurationMap.get(previewSz);
            boolean findDuration = mCollector.expectTrue("Unable to find minFrameDuration for size "
                    + previewSz.toString(), minFrameDuration != null);
            if (findDuration) {
                mCollector.expectTrue("Frame duration " + frameDuration + " must be no smaller than"
                        + " minFrameDuration " + minFrameDuration,
                        frameDuration >= minFrameDuration);
            }
        } else {
            Log.i(TAG, "verifyPreviewTargetFpsRange - MANUAL_SENSOR control is not supported," +
                    " skipping duration and exposure time check.");
        }
    }

    /**
     * Test all supported preview sizes for a camera device.
     *
     * @throws Exception
     */
    private void previewTestByCamera() throws Exception {
        List<Size> previewSizes = getSupportedPreviewSizes(
                mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);

        for (final Size sz : previewSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing camera preview size: " + sz.toString());
            }

            // TODO: vary the different settings like crop region to cover more cases.
            CaptureRequest.Builder requestBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);

            startPreview(requestBuilder, sz, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
            stopPreview();
        }
    }

    private void previewTestPatternTestByCamera() throws Exception {
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        int[] testPatternModes = mStaticInfo.getAvailableTestPatternModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureCallback mockCaptureCallback;

        final int[] TEST_PATTERN_DATA = {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}; // G:100%, RB:0.
        for (int mode : testPatternModes) {
            if (VERBOSE) {
                Log.v(TAG, "Test pattern mode: " + mode);
            }
            requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, mode);
            if (mode == CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
                // Assign color pattern to SENSOR_TEST_PATTERN_MODE_DATA
                requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, TEST_PATTERN_DATA);
            }
            mockCaptureCallback = mock(CaptureCallback.class);
            startPreview(requestBuilder, maxPreviewSize, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_TEST_PATTERN_FRAMES_VERIFIED,
                    NUM_TEST_PATTERN_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }

        stopPreview();
    }

    private void surfaceSetTestByCamera(String cameraId) throws Exception {
        final int MAX_SURFACE_GROUP_ID = 10;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
        Size yuvSizeBound = maxPreviewSz; // Default case: legacy device
        if (mStaticInfo.isHardwareLevelLimited()) {
            yuvSizeBound = mOrderedVideoSizes.get(0);
        } else if (mStaticInfo.isHardwareLevelAtLeastFull()) {
            yuvSizeBound = null;
        }
        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, yuvSizeBound).get(0);

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        ImageDropperListener imageListener = new ImageDropperListener();

        updatePreviewSurface(maxPreviewSz);
        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, imageListener);
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        OutputConfiguration previewConfig = new OutputConfiguration(mPreviewSurface);
        OutputConfiguration yuvConfig = new OutputConfiguration(mReaderSurface);
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, previewConfig.getSurfaceGroupId());
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, yuvConfig.getSurfaceGroupId());
        assertEquals(mPreviewSurface, previewConfig.getSurface());
        assertEquals(mReaderSurface, yuvConfig.getSurface());
        outputConfigs.add(previewConfig);
        outputConfigs.add(yuvConfig);
        requestBuilder.addTarget(mPreviewSurface);
        requestBuilder.addTarget(mReaderSurface);

        // Test different stream set ID.
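        // Note: surfaces configured with the same (non-default) surface group ID are declared as
        // not being used simultaneously, so the camera device may share buffer allocations
        // between them.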
        for (int surfaceGroupId = OutputConfiguration.SURFACE_GROUP_ID_NONE;
                surfaceGroupId < MAX_SURFACE_GROUP_ID; surfaceGroupId++) {
            if (VERBOSE) {
                Log.v(TAG, "test preview with surface group id: " + surfaceGroupId);
            }

            previewConfig = new OutputConfiguration(surfaceGroupId, mPreviewSurface);
            yuvConfig = new OutputConfiguration(surfaceGroupId, mReaderSurface);
            outputConfigs.clear();
            outputConfigs.add(previewConfig);
            outputConfigs.add(yuvConfig);

            for (OutputConfiguration config : outputConfigs) {
                assertEquals(surfaceGroupId, config.getSurfaceGroupId());
            }

            CameraCaptureSession.StateCallback mockSessionListener =
                    mock(CameraCaptureSession.StateCallback.class);

            mSession = configureCameraSessionWithConfig(mCamera, outputConfigs,
                    mockSessionListener, mHandler);

            mSession.prepare(mPreviewSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mPreviewSurface));

            mSession.prepare(mReaderSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mReaderSurface));

            CaptureRequest request = requestBuilder.build();
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);
            mSession.setRepeatingRequest(request, mockCaptureCallback, mHandler);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }
    }

    private class IsCaptureResultValid extends ArgumentMatcher<TotalCaptureResult> {
        @Override
        public boolean matches(Object obj) {
            TotalCaptureResult result = (TotalCaptureResult) obj;
            Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (timeStamp != null && timeStamp.longValue() > 0L) {
                return true;
            }
            return false;
        }
    }

    private void verifyCaptureResults(
            CameraCaptureSession session,
            CaptureCallback mockListener,
            int expectResultCount,
            int timeOutMs) {
        // Should receive expected number of onCaptureStarted callbacks.
        ArgumentCaptor<Long> timestamps = ArgumentCaptor.forClass(Long.class);
        ArgumentCaptor<Long> frameNumbers = ArgumentCaptor.forClass(Long.class);
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                .onCaptureStarted(
                        eq(session),
                        isA(CaptureRequest.class),
                        timestamps.capture(),
                        frameNumbers.capture());

        // Validate timestamps: all timestamps should be larger than 0 and monotonically increase.
        long timestamp = 0;
        for (Long nextTimestamp : timestamps.getAllValues()) {
            assertNotNull("Next timestamp is null!", nextTimestamp);
            assertTrue("Captures are out of order", timestamp < nextTimestamp);
            timestamp = nextTimestamp;
        }

        // Validate frame numbers: all frame numbers should be consecutive and positive
        long frameNumber = -1;
        for (Long nextFrameNumber : frameNumbers.getAllValues()) {
            assertNotNull("Next frame number is null!", nextFrameNumber);
            assertTrue("Captures are out of order",
                    (frameNumber == -1) || (frameNumber + 1 == nextFrameNumber));
            frameNumber = nextFrameNumber;
        }

        // Should receive expected number of capture results.
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                .onCaptureCompleted(
                        eq(session),
                        isA(CaptureRequest.class),
                        argThat(new IsCaptureResultValid()));

        // Should not receive any capture failed callbacks.
        verify(mockListener, never())
                .onCaptureFailed(
                        eq(session),
                        isA(CaptureRequest.class),
                        isA(CaptureFailure.class));
    }

}