/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static com.android.ex.camera2.blocking.BlockingSessionCallback.SESSION_CLOSED;

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.InputConfiguration;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.ConditionVariable;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import com.android.compatibility.common.util.DeviceReportLog;
import com.android.compatibility.common.util.ResultType;
import com.android.compatibility.common.util.ResultUnit;
import com.android.compatibility.common.util.Stat;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
 * shutter lag, etc. The KPI data will be reported in the CTS results.
 */
public class PerformanceTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "PerformanceTest";
    private static final String REPORT_LOG_NAME = "CtsCameraTestCases";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int NUM_TEST_LOOPS = 5;
    private static final int NUM_MAX_IMAGES = 4;
    private static final int NUM_RESULTS_WAIT = 30;
    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
    private final int MAX_REPROCESS_IMAGES = 6;
    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
    // The ZSL queue depth should be larger than the maximum number of simultaneous reprocess
    // capture requests, to maintain a reasonable number of candidate images for the worst case.
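    // With MAX_REPROCESS_IMAGES = 6, the depth below works out to 6 * 3 / 2 = 9, leaving at
    // least 3 candidate ZSL buffers even when 6 reprocess captures are in flight.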
    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
    private final double REPROCESS_STALL_MARGIN = 0.1;

    private DeviceReportLog mReportLog;

    // Used for reading camera output buffers.
    private ImageReader mCameraZslReader;
    private SimpleImageReaderListener mCameraZslImageListener;
    // Used for reprocessing (jpeg) output.
    private ImageReader mJpegReader;
    private SimpleImageReaderListener mJpegListener;
    // Used for reprocessing input.
    private ImageWriter mWriter;
    private SimpleCaptureCallback mZslResultListener;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test camera launch KPI: the time duration between the time a camera device starts to
     * open and the time the first preview frame is available.
     * <p>
     * It includes the camera open time, the session creation time, and the processing latency
     * of the first preview request. For the SurfaceView based preview use case, there is no
     * way for the client to know the exact preview frame arrival time. To approximate this
     * time, a companion YUV_420_888 stream is created. The first YUV_420_888 Image coming out
     * of the ImageReader is treated as the first preview frame arrival time.</p>
     * <p>
     * For depth-only devices, timing is done with the DEPTH16 format instead.
     * </p>
     */
    public void testCameraLaunch() throws Exception {
        double[] avgCameraLaunchTimes = new double[mCameraIds.length];

        int counter = 0;
        for (String id : mCameraIds) {
            // Do NOT move these variables to outer scope.
            // They will be passed to DeviceReportLog and their references will be stored.
            String streamName = "test_camera_launch";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
            double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
            double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
            double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
            try {
                mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(id));
                if (mStaticInfo.isColorOutputSupported()) {
                    initializeImageReader(id, ImageFormat.YUV_420_888);
                } else {
                    assertTrue("Depth output must be supported if regular output isn't!",
                            mStaticInfo.isDepthOutputSupported());
                    initializeImageReader(id, ImageFormat.DEPTH16);
                }

                SimpleImageListener imageListener = null;
                long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {
                    try {
                        // Need to create a new listener every iteration to be able to wait
                        // for the first image to come out.
                        imageListener = new SimpleImageListener();
                        mReader.setOnImageAvailableListener(imageListener, mHandler);
                        startTimeMs = SystemClock.elapsedRealtime();

                        // Blocking camera open
                        simpleOpenCamera(id);
                        openTimeMs = SystemClock.elapsedRealtime();
                        cameraOpenTimes[i] = openTimeMs - startTimeMs;
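
                        // camera_launch_time (computed below) spans from just before the
                        // open call to the first preview frame, i.e. the sum of the open,
                        // configure, and start-preview phases.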

                        // Blocking configure outputs.
                        configureReaderAndPreviewOutputs();
                        configureTimeMs = SystemClock.elapsedRealtime();
                        configureStreamTimes[i] = configureTimeMs - openTimeMs;

                        // Blocking start preview (from start preview to first image arrival)
                        SimpleCaptureCallback resultListener =
                                new SimpleCaptureCallback();
                        blockingStartPreview(resultListener, imageListener);
                        previewStartedTimeMs = SystemClock.elapsedRealtime();
                        startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
                        cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;

                        // Let the preview run for a couple of frames
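                        // (NUM_RESULTS_WAIT = 30 results is roughly one second of preview at
                        // a nominal 30 fps.)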
                        waitForNumResults(resultListener, NUM_RESULTS_WAIT);

                        // Blocking stop preview
                        startTimeMs = SystemClock.elapsedRealtime();
                        blockingStopPreview();
                        stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    } finally {
                        // Blocking camera close
                        startTimeMs = SystemClock.elapsedRealtime();
                        closeDevice();
                        cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                }

                avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
                // Finish the data collection and report the KPIs.
                // ReportLog keys have to be in lowercase underscored format.
                mReportLog.addValues("camera_open_time", cameraOpenTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            } finally {
                closeImageReader();
            }
            counter++;
            mReportLog.submit(getInstrumentation());
        }
        if (mCameraIds.length != 0) {
            String streamName = "test_camera_launch_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
                    Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.submit(getInstrumentation());
        }
    }

    /**
     * Test camera capture KPI for the YUV_420_888 format: the time duration between sending
     * out a single image capture request and receiving the image data and capture result.
     * <p>
     * It enumerates the following metrics: capture latency, computed by measuring the time
     * between sending out the capture request and getting the image data; partial result
     * latency, computed by measuring the time between sending out the capture request and
     * getting the partial result; capture result latency, computed by measuring the time
     * between sending out the capture request and getting the full capture result.
     * </p>
     */
    public void testSingleCapture() throws Exception {
        double[] avgResultTimes = new double[mCameraIds.length];

        int counter = 0;
        for (String id : mCameraIds) {
            // Do NOT move these variables to outer scope.
            // They will be passed to DeviceReportLog and their references will be stored.
            String streamName = "test_single_capture";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] captureTimes = new double[NUM_TEST_LOOPS];
            double[] getPartialTimes = new double[NUM_TEST_LOOPS];
            double[] getResultTimes = new double[NUM_TEST_LOOPS];
            try {
                openDevice(id);

                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                boolean partialsExpected = mStaticInfo.getPartialResultCount() > 1;
                long startTimeMs;
                boolean isPartialTimingValid = partialsExpected;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // Set up builders and listeners.
                    CaptureRequest.Builder previewBuilder =
                            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageListener imageListener = new SimpleImageListener();

                    Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
                            id, mCameraManager, ImageFormat.YUV_420_888, /*bound*/null).get(0);

                    prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
                            mOrderedPreviewSizes.get(0), maxYuvSize,
                            ImageFormat.YUV_420_888, previewResultListener,
                            NUM_MAX_IMAGES, imageListener);

                    // Capture an image and get the image data.
                    startTimeMs = SystemClock.elapsedRealtime();
                    CaptureRequest request = captureBuilder.build();
                    mSession.capture(request, captureResultListener, mHandler);

                    Pair<CaptureResult, Long> partialResultNTime = null;
                    if (partialsExpected) {
                        partialResultNTime =
                                captureResultListener.getPartialResultNTimeForRequest(
                                        request, NUM_RESULTS_WAIT);
                        // Even when maxPartials > 1, some devices may not produce partials.
                        if (partialResultNTime == null) {
                            partialsExpected = false;
                            isPartialTimingValid = false;
                        }
                    }
                    Pair<CaptureResult, Long> captureResultNTime =
                            captureResultListener.getCaptureResultNTimeForRequest(
                                    request, NUM_RESULTS_WAIT);
                    imageListener.waitForImageAvailable(
                            CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
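
                    // All three latencies are measured from the same startTimeMs taken just
                    // before the capture request was submitted.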
                    captureTimes[i] = imageListener.getTimeReceivedImage() - startTimeMs;
                    if (partialsExpected) {
                        getPartialTimes[i] = partialResultNTime.second - startTimeMs;
                        if (getPartialTimes[i] < 0) {
                            isPartialTimingValid = false;
                        }
                    }
                    getResultTimes[i] = captureResultNTime.second - startTimeMs;

                    // Simulate a real scenario (let the preview run for a bit).
                    waitForNumResults(previewResultListener, NUM_RESULTS_WAIT);

                    stopPreview();
                }
                mReportLog.addValues("camera_capture_latency", captureTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                // If any partial result did not contain AE and AF state, partial timing is
                // not reported.
                if (isPartialTimingValid) {
                    mReportLog.addValues("camera_partial_result_latency", getPartialTimes,
                            ResultType.LOWER_BETTER, ResultUnit.MS);
                }
                mReportLog.addValues("camera_capture_result_latency", getResultTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
            } finally {
                closeImageReader();
                closeDevice();
            }
            counter++;
            mReportLog.submit(getInstrumentation());
        }

        // The result will not be reported in the CTS report if no summary is printed.
        if (mCameraIds.length != 0) {
            String streamName = "test_single_capture_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_capture_result_average_latency_for_all_cameras",
                    Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(getInstrumentation());
        }
    }
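
    // The four reprocessing tests below cover the matrix of {shot-to-shot latency, throughput}
    // x {default, High Quality} noise reduction and edge modes, for each supported reprocess
    // input format (YUV_420_888 and PRIVATE).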

    /**
     * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the
     * time a reprocess request is issued to the time the reprocessed image is returned.
     */
    public void testReprocessingLatency() throws Exception {
        for (String id : mCameraIds) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    openDevice(id);
                    String streamName = "test_reprocessing_latency";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
                            /*requireHighQuality*/false);
                } finally {
                    closeReaderWriters();
                    closeDevice();
                    mReportLog.submit(getInstrumentation());
                }
            }
        }
    }

    /**
     * Test reprocessing throughput with default NR and edge options, i.e., how many frames
     * can be reprocessed during a given amount of time.
     */
    public void testReprocessingThroughput() throws Exception {
        for (String id : mCameraIds) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    openDevice(id);
                    String streamName = "test_reprocessing_throughput";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
                            /*requireHighQuality*/false);
                } finally {
                    closeReaderWriters();
                    closeDevice();
                    mReportLog.submit(getInstrumentation());
                }
            }
        }
    }

    /**
     * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e.,
     * from the time a reprocess request is issued to the time the reprocessed image is
     * returned.
     */
    public void testHighQualityReprocessingLatency() throws Exception {
        for (String id : mCameraIds) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    openDevice(id);
                    String streamName = "test_high_quality_reprocessing_latency";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
                            /*requireHighQuality*/true);
                } finally {
                    closeReaderWriters();
                    closeDevice();
                    mReportLog.submit(getInstrumentation());
                }
            }
        }
    }

    /**
     * Test reprocessing throughput with High Quality NR and edge options, i.e., how many
     * frames can be reprocessed during a given amount of time.
     */
    public void testHighQualityReprocessingThroughput() throws Exception {
        for (String id : mCameraIds) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    openDevice(id);
                    String streamName = "test_high_quality_reprocessing_throughput";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
                            /*requireHighQuality*/true);
                } finally {
                    closeReaderWriters();
                    closeDevice();
                    mReportLog.submit(getInstrumentation());
                }
            }
        }
    }

    /**
     * Test the preview stall (frame drops) caused by reprocessing.
     */
    public void testReprocessingCaptureStall() throws Exception {
        for (String id : mCameraIds) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    openDevice(id);
                    String streamName = "test_reprocessing_capture_stall";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingCaptureStallTestByCamera(format);
                } finally {
                    closeReaderWriters();
                    closeDevice();
                    mReportLog.submit(getInstrumentation());
                }
            }
        }
    }

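    /**
     * Measure the ZSL stream's maximum inter-frame timestamp gap and average frame duration
     * while reprocess captures are interleaved, then check the maximum gap against the
     * device's advertised capture stall limit.
     */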
    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat)
            throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing.
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocess requests.
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs =
                new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }
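
        // Interleave reprocess captures with the ongoing ZSL stream and measure how much the
        // repeating request stalls around each one.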
        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mSession.capture(reprocessRequest, reprocessResultListener, mHandler);
            // Wait for the reprocess output JPEG and capture result to come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                    + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need to look over a longer window, as the stutter could happen after the
            // reprocess output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Timestamps should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Over the candidate results, calculate the average frame duration and the max
            // timestamp gap.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        stopZslStreaming();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x averageFrameDuration
        // x (1 + error margin).
        int maxCaptureStallFrames = mStaticInfo.getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than "
                    + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
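
    /**
     * Measure reprocess capture performance for the currently opened camera.
     *
     * @param reprocessInputFormat the reprocess input format (YUV_420_888 or PRIVATE)
     * @param asyncMode if true, issue all reprocess requests up front and measure output
     *                  throughput; if false, issue them one by one and measure shot-to-shot
     *                  latency
     * @param requireHighQuality if true, use High Quality noise reduction and edge modes
     */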
    private void reprocessingPerformanceTestByCamera(int reprocessInputFormat,
            boolean asyncMode, boolean requireHighQuality)
            throws Exception {
        // Prepare the reprocess capture.
        prepareReprocessCapture(reprocessInputFormat);

        // Start ZSL streaming.
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        CaptureRequest.Builder[] reprocessReqs =
                new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
        double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
        long startTimeMs;
        for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
            if (requireHighQuality) {
                // Reprocessing should support High Quality for the NR and edge modes.
                reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                        CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
                reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                        CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            }
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
        }

        if (asyncMode) {
            // Async capture: issue all the reprocess requests as quickly as possible, then
            // check the throughput of the output JPEGs.
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                // Could be slow for YUV reprocessing, so queue the inputs in advance.
                mWriter.queueInputImage(inputImages[i]);
            }

            // Submit the requests.
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                mSession.capture(reprocessReqs[i].build(), null, null);
            }

            // Get the output images; each latency is the gap between consecutive arrivals.
            startTimeMs = SystemClock.elapsedRealtime();
            Image[] jpegImages = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                jpegImages[i] =
                        mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
                startTimeMs = SystemClock.elapsedRealtime();
            }
            for (Image i : jpegImages) {
                i.close();
            }
        } else {
            // Sync capture: issue the reprocess requests one by one, and only submit the next
            // one when the previous captured image has been returned. This tests back-to-back
            // capture performance.
            Image[] jpegImages = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                startTimeMs = SystemClock.elapsedRealtime();
                mWriter.queueInputImage(inputImages[i]);
                mSession.capture(reprocessReqs[i].build(), null, null);
                jpegImages[i] =
                        mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
            }
            for (Image i : jpegImages) {
                i.close();
            }
        }

        stopZslStreaming();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }

        // Report the performance data.
        String captureMsg;
        if (asyncMode) {
            captureMsg = "capture latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER,
                    ResultUnit.MS);
        } else {
            captureMsg = "shot to shot latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER,
                    ResultUnit.MS);
        }
    }

    /**
     * Start preview and ZSL streaming.
     */
    private void startZslStreaming() throws Exception {
        CaptureRequest.Builder zslBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
        zslBuilder.addTarget(mPreviewSurface);
        zslBuilder.addTarget(mCameraZslReader.getSurface());
        mSession.setRepeatingRequest(zslBuilder.build(), mZslResultListener, mHandler);
    }
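
    /**
     * Stop the repeating ZSL request and wait for the session to become ready.
     */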
    private void stopZslStreaming() throws Exception {
        mSession.stopRepeating();
        mSessionListener.getStateWaiter().waitForState(
                BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
    }

    /**
     * Wait for a certain number of frames; the images and results are drained from the
     * listeners to make sure that the next reprocessing can get matched results and images.
     *
     * @param numFrameWait The number of frames to wait before returning; 0 means that
     *            this call returns immediately after streaming starts.
     */
    private void waitForFrames(int numFrameWait) throws Exception {
        if (numFrameWait < 0) {
            throw new IllegalArgumentException("numFrameWait " + numFrameWait +
                    " should be non-negative");
        }

        for (int i = 0; i < numFrameWait; i++) {
            mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
        }
    }

    private void closeReaderWriters() {
        mCameraZslImageListener.drain();
        CameraTestUtils.closeImageReader(mCameraZslReader);
        mCameraZslReader = null;
        mJpegListener.drain();
        CameraTestUtils.closeImageReader(mJpegReader);
        mJpegReader = null;
        CameraTestUtils.closeImageWriter(mWriter);
        mWriter = null;
    }
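
    /**
     * Prepare a reprocessable capture session: a max-size preview surface, a ZSL reader at
     * the maximum supported input size/format, a JPEG reader for the reprocess output, and an
     * ImageWriter feeding the session's input surface.
     */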
    private void prepareReprocessCapture(int inputFormat)
            throws CameraAccessException {
        // 1. Find the right preview and capture sizes.
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        Size[] supportedInputSizes =
                mStaticInfo.getAvailableSizesForFormatChecked(inputFormat,
                        StaticMetadata.StreamDirection.Input);
        Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
        Size maxJpegSize = mOrderedStillSizes.get(0);
        updatePreviewSurface(maxPreviewSize);
        mZslResultListener = new SimpleCaptureCallback();

        // 2. Create the camera output ImageReaders.
        // YUV/opaque output; the camera should support output with the input size/format.
        mCameraZslImageListener = new SimpleImageReaderListener(
                /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
        mCameraZslReader = CameraTestUtils.makeImageReader(
                maxInputSize, inputFormat, MAX_ZSL_IMAGES, mCameraZslImageListener, mHandler);
        // JPEG reprocess output.
        mJpegListener = new SimpleImageReaderListener();
        mJpegReader = CameraTestUtils.makeImageReader(
                maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES, mJpegListener, mHandler);

        // Create the camera reprocess session.
        List<Surface> outSurfaces = new ArrayList<Surface>();
        outSurfaces.add(mPreviewSurface);
        outSurfaces.add(mCameraZslReader.getSurface());
        outSurfaces.add(mJpegReader.getSurface());
        InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
                maxInputSize.getHeight(), inputFormat);
        mSessionListener = new BlockingSessionCallback();
        mSession = CameraTestUtils.configureReprocessableCameraSession(
                mCamera, inputConfig, outSurfaces, mSessionListener, mHandler);

        // 3. Create the ImageWriter for the reprocess input.
        mWriter = CameraTestUtils.makeImageWriter(
                mSession.getInputSurface(), MAX_INPUT_IMAGES, /*listener*/null, /*handler*/null);
    }

    private void blockingStopPreview() throws Exception {
        stopPreview();
        mSessionListener.getStateWaiter().waitForState(SESSION_CLOSED,
                CameraTestUtils.SESSION_CLOSE_TIMEOUT_MS);
    }

    private void blockingStartPreview(CaptureCallback listener,
            SimpleImageListener imageListener) throws Exception {
        if (mPreviewSurface == null || mReaderSurface == null) {
            throw new IllegalStateException(
                    "preview and reader surfaces must be initialized first");
        }

        CaptureRequest.Builder previewBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        if (mStaticInfo.isColorOutputSupported()) {
            previewBuilder.addTarget(mPreviewSurface);
        }
        previewBuilder.addTarget(mReaderSurface);
        mSession.setRepeatingRequest(previewBuilder.build(), listener, mHandler);
        imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    }

    /**
     * Configure the reader and preview outputs and wait until done.
     */
    private void configureReaderAndPreviewOutputs() throws Exception {
        if (mPreviewSurface == null || mReaderSurface == null) {
            throw new IllegalStateException(
                    "preview and reader surfaces must be initialized first");
        }
        mSessionListener = new BlockingSessionCallback();
        List<Surface> outputSurfaces = new ArrayList<>();
        if (mStaticInfo.isColorOutputSupported()) {
            outputSurfaces.add(mPreviewSurface);
        }
        outputSurfaces.add(mReaderSurface);
        mSession = CameraTestUtils.configureCameraSession(mCamera, outputSurfaces,
                mSessionListener, mHandler);
    }

    /**
     * Initialize the ImageReader instance and preview surface.
     * @param cameraId The camera to be opened.
     * @param format The format used to create the ImageReader instance.
     */
    private void initializeImageReader(String cameraId, int format) throws Exception {
        mOrderedPreviewSizes = CameraTestUtils.getSortedSizesForFormat(
                cameraId, mCameraManager, format,
                CameraTestUtils.getPreviewSizeBound(mWindowManager,
                        CameraTestUtils.PREVIEW_SIZE_BOUND));
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        createImageReader(maxPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
        updatePreviewSurface(maxPreviewSize);
    }
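
    /**
     * Open the camera (blocking) and cache its static metadata and the minimum preview frame
     * duration map.
     */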
    private void simpleOpenCamera(String cameraId) throws Exception {
        mCamera = CameraTestUtils.openCamera(
                mCameraManager, cameraId, mCameraListener, mHandler);
        mCollector.setCameraId(cameraId);
        mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
                CheckLevel.ASSERT, /*collector*/null);
        mMinPreviewFrameDurationMap =
                mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(
                        ImageFormat.YUV_420_888);
    }

    /**
     * Simple image listener that can be used to time the availability of the first image.
     */
    private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
        private ConditionVariable imageAvailable = new ConditionVariable();
        private boolean imageReceived = false;
        private long mTimeReceivedImage = 0;

        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = null;
            if (!imageReceived) {
                if (VERBOSE) {
                    Log.v(TAG, "First image arrives");
                }
                imageReceived = true;
                mTimeReceivedImage = SystemClock.elapsedRealtime();
                imageAvailable.open();
            }
            image = reader.acquireNextImage();
            if (image != null) {
                image.close();
            }
        }

        /**
         * Wait for an image to become available; return immediately if one was already
         * received, otherwise block until one arrives or the timeout expires.
         */
        public void waitForImageAvailable(long timeout) {
            if (imageReceived) {
                imageReceived = false;
                return;
            }

            if (imageAvailable.block(timeout)) {
                imageAvailable.close();
                imageReceived = false;
            } else {
                // Report the timeout that was actually used, not a fixed constant.
                throw new TimeoutRuntimeException("Unable to get the first image after "
                        + timeout + "ms");
            }
        }

        public long getTimeReceivedImage() {
            return mTimeReceivedImage;
        }
    }
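
    /**
     * Capture callback that pairs each partial and final capture result with the
     * elapsedRealtime timestamp at which it arrived, so tests can compute result latencies.
     * A partial result's timestamp is only recorded (it stays -1 otherwise) when the partial
     * contains both AE and AF state.
     */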
    private static class SimpleTimingResultListener
            extends CameraCaptureSession.CaptureCallback {
        private final LinkedBlockingQueue<Pair<CaptureResult, Long>> mPartialResultQueue =
                new LinkedBlockingQueue<Pair<CaptureResult, Long>>();
        private final LinkedBlockingQueue<Pair<CaptureResult, Long>> mResultQueue =
                new LinkedBlockingQueue<Pair<CaptureResult, Long>>();

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                Long time = SystemClock.elapsedRealtime();
                mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureCompleted");
            }
        }

        @Override
        public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
                CaptureResult partialResult) {
            try {
                // Only record the arrival time if AE and AF state are present.
                Long time = -1L;
                if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
                        partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
                    time = SystemClock.elapsedRealtime();
                }
                mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureProgressed");
            }
        }

        public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
            try {
                Pair<CaptureResult, Long> result =
                        mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
                return result;
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
            try {
                Pair<CaptureResult, Long> result =
                        mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
                assertNotNull("Waiting for a capture result timed out in " + timeout + "ms",
                        result);
                return result;
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(
                CaptureRequest myRequest, int numResultsWait) {
            if (numResultsWait < 0) {
                throw new IllegalArgumentException("numResultsWait must be no less than 0");
            }

            Pair<CaptureResult, Long> result;
            int i = 0;
            do {
                result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
                // The result may be null if no partials are produced on this particular path,
                // so stop trying.
                if (result == null) break;
                if (result.first.getRequest().equals(myRequest)) {
                    return result;
                }
            } while (i++ < numResultsWait);

            // No partials produced - this may not be an error, since a given device may not
            // produce any partials on this testing path.
            return null;
        }

        public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(
                CaptureRequest myRequest, int numResultsWait) {
            if (numResultsWait < 0) {
                throw new IllegalArgumentException("numResultsWait must be no less than 0");
            }

            Pair<CaptureResult, Long> result;
            int i = 0;
            do {
                result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
                if (result.first.getRequest().equals(myRequest)) {
                    return result;
                }
            } while (i++ < numResultsWait);

            throw new TimeoutRuntimeException("Unable to get the expected capture result after "
                    + "waiting for " + numResultsWait + " results");
        }
    }
}