1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.REPORT_LOG_NAME;
20 
21 import static org.junit.Assert.assertNotNull;
22 import static org.junit.Assert.assertTrue;
23 
24 import android.app.Instrumentation;
25 import android.content.Context;
26 import android.graphics.ImageFormat;
27 import android.graphics.SurfaceTexture;
28 import android.hardware.HardwareBuffer;
29 import android.hardware.camera2.CameraAccessException;
30 import android.hardware.camera2.CameraCaptureSession;
31 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
32 import android.hardware.camera2.CameraCharacteristics;
33 import android.hardware.camera2.CameraDevice;
34 import android.hardware.camera2.CameraMetadata;
35 import android.hardware.camera2.CaptureRequest;
36 import android.hardware.camera2.CaptureResult;
37 import android.hardware.camera2.TotalCaptureResult;
38 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
39 import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
40 import android.hardware.camera2.cts.helpers.StaticMetadata;
41 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
42 import android.hardware.camera2.cts.testcases.Camera2AndroidTestRule;
43 import android.hardware.camera2.params.InputConfiguration;
44 import android.hardware.camera2.params.OutputConfiguration;
45 import android.hardware.camera2.params.StreamConfigurationMap;
46 import android.media.Image;
47 import android.media.ImageReader;
48 import android.media.ImageWriter;
49 import android.os.Build;
50 import android.os.Bundle;
51 import android.os.ConditionVariable;
52 import android.os.SystemClock;
53 import android.util.Log;
54 import android.util.Pair;
55 import android.util.Range;
56 import android.util.Size;
57 import android.view.Surface;
58 
59 import androidx.test.InstrumentationRegistry;
60 import androidx.test.rule.ActivityTestRule;
61 
62 import com.android.compatibility.common.util.DeviceReportLog;
63 import com.android.compatibility.common.util.PropertyUtil;
64 import com.android.compatibility.common.util.ResultType;
65 import com.android.compatibility.common.util.ResultUnit;
66 import com.android.compatibility.common.util.Stat;
67 import com.android.ex.camera2.blocking.BlockingSessionCallback;
68 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
69 
70 import org.junit.Rule;
71 import org.junit.Test;
72 import org.junit.runner.RunWith;
73 import org.junit.runners.JUnit4;
74 
75 import java.util.ArrayList;
76 import java.util.Arrays;
77 import java.util.Collections;
78 import java.util.List;
79 import java.util.ListIterator;
80 import java.util.Objects;
81 import java.util.concurrent.LinkedBlockingQueue;
82 import java.util.concurrent.TimeUnit;
83 
84 /**
85  * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
86  * shutter lag etc. The KPI data will be reported in cts results.
87  */
88 @RunWith(JUnit4.class)
89 public class PerformanceTest {
    private static final String TAG = "PerformanceTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    // Number of iterations each KPI measurement loop runs per camera.
    private static final int NUM_TEST_LOOPS = 10;
    // Max in-flight images for the still-capture ImageReaders.
    private static final int NUM_MAX_IMAGES = 4;
    // Number of capture results to drain while letting preview run between measurements.
    private static final int NUM_RESULTS_WAIT = 30;
    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
    private final int MAX_REPROCESS_IMAGES = 6;
    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
    // ZSL queue depth should be bigger than the max simultaneous reprocessing capture request
    // count to maintain a reasonable number of candidate images for the worst case.
    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
    private final double REPROCESS_STALL_MARGIN = 0.1;
    // Timeout for waiting on a single capture result, in milliseconds.
    private static final int WAIT_FOR_RESULT_TIMEOUT_MS = 3000;
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
    private static final long FRAME_DURATION_NS_30FPS = 33333333L;
    private static final int NUM_ZOOM_STEPS = 10;
    // Instrumentation argument key: a non-zero byte means the caller already shows an activity.
    private static final String HAS_ACTIVITY_ARG_KEY = "has-activity";

    // Report log for the KPI stream currently being collected; re-created per stream.
    private DeviceReportLog mReportLog;

    // Used for reading camera output buffers.
    private ImageReader mCameraZslReader;
    private SimpleImageReaderListener mCameraZslImageListener;
    // Used for reprocessing (jpeg) output.
    private ImageReader mJpegReader;
    private SimpleImageReaderListener mJpegListener;
    // Used for reprocessing input.
    private ImageWriter mWriter;
    private SimpleCaptureCallback mZslResultListener;

    // Preview configuration shared by the launch/capture tests.
    private Size mPreviewSize;
    private Surface mPreviewSurface;
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mImageReaderFormat;

    private static final Instrumentation mInstrumentation =
            InstrumentationRegistry.getInstrumentation();
    private static final Context mContext = InstrumentationRegistry.getTargetContext();

    @Rule
    public final Camera2AndroidTestRule mTestRule = new Camera2AndroidTestRule(mContext);

    // b/284352937: Display an activity with SurfaceView so that camera's effect on refresh
    // rate takes precedence.
    //
    // - If no activity is displayed, home screen would vote for a completely different refresh
    // rate. Some examples are 24hz and 144hz. These don't reflect the actual refresh rate
    // when camera runs with a SurfaceView.
    // - The testSurfaceViewJitterReduction needs to read timestamps for each output image. If
    // we directly connect camera to SurfaceView, we won't have access to timestamps.
    //
    // So the solution is that if no activity already exists, create an activity with SurfaceView,
    // but not connect it to camera.
    @Rule
    public final ActivityTestRule<Camera2SurfaceViewCtsActivity> mActivityRule =
            createActivityRuleIfNeeded();
148 
createActivityRuleIfNeeded()149     private static ActivityTestRule<Camera2SurfaceViewCtsActivity> createActivityRuleIfNeeded() {
150         Bundle bundle = InstrumentationRegistry.getArguments();
151         byte hasActivity = bundle.getByte(HAS_ACTIVITY_ARG_KEY);
152 
153         // If the caller already has an activity, do not create the ActivityTestRule.
154         if (hasActivity != 0) {
155             return null;
156         } else {
157             return new ActivityTestRule<>(Camera2SurfaceViewCtsActivity.class);
158         }
159     }
160 
161     /**
162      * Test camera launch KPI: the time duration between a camera device is
163      * being opened and first preview frame is available.
164      * <p>
165      * It includes camera open time, session creation time, and sending first
166      * preview request processing latency etc. For the SurfaceView based preview use
167      * case, there is no way for client to know the exact preview frame
168      * arrival time. To approximate this time, a companion YUV420_888 stream is
169      * created. The first YUV420_888 Image coming out of the ImageReader is treated
170      * as the first preview arrival time.</p>
171      * <p>
172      * For depth-only devices, timing is done with the DEPTH16 format instead.
173      * </p>
174      */
    @Test
    public void testCameraLaunch() throws Exception {
        double[] avgCameraLaunchTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = "test_camera_launch";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            // Per-iteration samples for each launch phase; one entry per test loop.
            double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
            double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
            double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
            double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
            try {
                CameraCharacteristics ch =
                        mTestRule.getCameraManager().getCameraCharacteristics(id);
                mTestRule.setStaticInfo(new StaticMetadata(ch));
                boolean isColorOutputSupported = mTestRule.getStaticInfo().isColorOutputSupported();
                // Color devices are timed against a YUV_420_888 companion stream; depth-only
                // devices are timed with DEPTH16 instead (see the method javadoc).
                if (isColorOutputSupported) {
                    initializeImageReader(id, ImageFormat.YUV_420_888);
                } else {
                    assertTrue("Depth output must be supported if regular output isn't!",
                            mTestRule.getStaticInfo().isDepthOutputSupported());
                    initializeImageReader(id, ImageFormat.DEPTH16);
                }
                updatePreviewSurface(mPreviewSize);

                SimpleImageListener imageListener = null;
                long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {
                    try {
                        // A fresh listener is needed every iteration so we can wait for this
                        // iteration's first image to come out.
                        imageListener = new SimpleImageListener();
                        mTestRule.getReader().setOnImageAvailableListener(
                                imageListener, mTestRule.getHandler());
                        startTimeMs = SystemClock.elapsedRealtime();

                        // Blocking open camera
                        simpleOpenCamera(id);
                        openTimeMs = SystemClock.elapsedRealtime();
                        cameraOpenTimes[i] = openTimeMs - startTimeMs;

                        // Blocking configure outputs.
                        CaptureRequest previewRequest =
                                configureReaderAndPreviewOutputs(id, isColorOutputSupported);
                        configureTimeMs = SystemClock.elapsedRealtime();
                        configureStreamTimes[i] = configureTimeMs - openTimeMs;

                        // Blocking start preview (start preview to first image arrives)
                        SimpleCaptureCallback resultListener =
                                new SimpleCaptureCallback();
                        blockingStartPreview(id, resultListener, previewRequest, imageListener);
                        previewStartedTimeMs = SystemClock.elapsedRealtime();
                        startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
                        // Launch time is the end-to-end span: open + configure + first frame.
                        cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;

                        // Let preview on for a couple of frames
                        CameraTestUtils.waitForNumResults(resultListener, NUM_RESULTS_WAIT,
                                WAIT_FOR_RESULT_TIMEOUT_MS);

                        // Blocking stop preview
                        startTimeMs = SystemClock.elapsedRealtime();
                        blockingStopRepeating();
                        stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                    finally {
                        // Blocking camera close; always runs so the device is released even if
                        // an iteration above throws.
                        startTimeMs = SystemClock.elapsedRealtime();
                        mTestRule.closeDevice(id);
                        cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                }

                avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
                // Finish the data collection, report the KPIs.
                // ReportLog keys have to be lowercase underscored format.
                mReportLog.addValues("camera_open_time", cameraOpenTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);
                mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            finally {
                mTestRule.closeDefaultImageReader();
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);

            if (VERBOSE) {
                Log.v(TAG, "Camera " + id + " device open times(ms): "
                        + Arrays.toString(cameraOpenTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraOpenTimes)
                        + ". Min(ms): " + Stat.getMin(cameraOpenTimes)
                        + ". Max(ms): " + Stat.getMax(cameraOpenTimes));
                Log.v(TAG, "Camera " + id + " configure stream times(ms): "
                        + Arrays.toString(configureStreamTimes)
                        + ". Average(ms): " + Stat.getAverage(configureStreamTimes)
                        + ". Min(ms): " + Stat.getMin(configureStreamTimes)
                        + ". Max(ms): " + Stat.getMax(configureStreamTimes));
                Log.v(TAG, "Camera " + id + " start preview times(ms): "
                        + Arrays.toString(startPreviewTimes)
                        + ". Average(ms): " + Stat.getAverage(startPreviewTimes)
                        + ". Min(ms): " + Stat.getMin(startPreviewTimes)
                        + ". Max(ms): " + Stat.getMax(startPreviewTimes));
                // NOTE(review): "nMin"/"nMax" below look like typos for "Min"/"Max"; left
                // unchanged here because log messages are runtime strings.
                Log.v(TAG, "Camera " + id + " stop preview times(ms): "
                        + Arrays.toString(stopPreviewTimes)
                        + ". Average(ms): " + Stat.getAverage(stopPreviewTimes)
                        + ". nMin(ms): " + Stat.getMin(stopPreviewTimes)
                        + ". nMax(ms): " + Stat.getMax(stopPreviewTimes));
                Log.v(TAG, "Camera " + id + " device close times(ms): "
                        + Arrays.toString(cameraCloseTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraCloseTimes)
                        + ". Min(ms): " + Stat.getMin(cameraCloseTimes)
                        + ". Max(ms): " + Stat.getMax(cameraCloseTimes));
                Log.v(TAG, "Camera " + id + " camera launch times(ms): "
                        + Arrays.toString(cameraLaunchTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraLaunchTimes)
                        + ". Min(ms): " + Stat.getMin(cameraLaunchTimes)
                        + ". Max(ms): " + Stat.getMax(cameraLaunchTimes));
            }
        }
        // A summary must be set for the result to appear in the CTS report at all.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = "test_camera_launch_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
                    Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
        }
    }
317 
318     /**
319      * Test camera capture KPI for YUV_420_888, PRIVATE, JPEG, RAW and RAW+JPEG
320      * formats: the time duration between sending out a single image capture request
321      * and receiving image data and capture result.
322      * <p>
323      * It enumerates the following metrics: capture latency, computed by
324      * measuring the time between sending out the capture request and getting
325      * the image data; partial result latency, computed by measuring the time
326      * between sending out the capture request and getting the partial result;
327      * capture result latency, computed by measuring the time between sending
328      * out the capture request and getting the full capture result.
329      * </p>
330      */
331     @Test
testSingleCapture()332     public void testSingleCapture() throws Exception {
333         int[] JPEG_FORMAT = {ImageFormat.JPEG};
334         testSingleCaptureForFormat(JPEG_FORMAT, "jpeg", /*addPreviewDelay*/ true);
335         if (!mTestRule.isPerfMeasure()) {
336             int[] JPEG_R_FORMAT = {ImageFormat.JPEG_R};
337             testSingleCaptureForFormat(JPEG_R_FORMAT, "jpeg_r", /*addPreviewDelay*/ true,
338                     /*enablePostview*/ false);
339             int[] YUV_FORMAT = {ImageFormat.YUV_420_888};
340             testSingleCaptureForFormat(YUV_FORMAT, null, /*addPreviewDelay*/ true);
341             int[] PRIVATE_FORMAT = {ImageFormat.PRIVATE};
342             testSingleCaptureForFormat(PRIVATE_FORMAT, "private", /*addPreviewDelay*/ true);
343             int[] RAW_FORMAT = {ImageFormat.RAW_SENSOR};
344             testSingleCaptureForFormat(RAW_FORMAT, "raw", /*addPreviewDelay*/ true);
345             int[] RAW_JPEG_FORMATS = {ImageFormat.RAW_SENSOR, ImageFormat.JPEG};
346             testSingleCaptureForFormat(RAW_JPEG_FORMATS, "raw_jpeg", /*addPreviewDelay*/ true);
347         }
348     }
349 
appendFormatDescription(String message, String formatDescription)350     private String appendFormatDescription(String message, String formatDescription) {
351         if (message == null) {
352             return null;
353         }
354 
355         String ret = message;
356         if (formatDescription != null) {
357             ret = String.format(ret + "_%s", formatDescription);
358         }
359 
360         return ret;
361     }
362 
testSingleCaptureForFormat(int[] formats, String formatDescription, boolean addPreviewDelay)363     private void testSingleCaptureForFormat(int[] formats, String formatDescription,
364             boolean addPreviewDelay) throws Exception {
365        testSingleCaptureForFormat(formats, formatDescription, addPreviewDelay,
366                /*enablePostview*/ true);
367     }
368 
    /**
     * Measures single still-capture KPIs for the given set of simultaneous output formats.
     *
     * <p>For each camera under test, runs {@code NUM_TEST_LOOPS} iterations of: start preview,
     * issue one still capture, then record three latencies relative to the capture call —
     * image-buffer arrival ("capture latency"), first partial result, and final capture
     * result. Per-camera values and a cross-camera average summary are reported via
     * {@link DeviceReportLog}.</p>
     *
     * @param formats output image formats captured simultaneously in each request.
     * @param formatDescription suffix appended to report keys; may be {@code null}.
     * @param addPreviewDelay whether to pause 500ms after preview start before capturing.
     * @param enablePostview whether a postview output is configured for the still capture.
     */
    private void testSingleCaptureForFormat(int[] formats, String formatDescription,
            boolean addPreviewDelay, boolean enablePostview) throws Exception {
        double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
        double[] avgCaptureTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = appendFormatDescription("test_single_capture", formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] captureTimes = new double[NUM_TEST_LOOPS];
            double[] getPartialTimes = new double[NUM_TEST_LOOPS];
            double[] getResultTimes = new double[NUM_TEST_LOOPS];
            ImageReader[] readers = null;
            try {
                if (!mTestRule.getAllStaticInfo().get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                StreamConfigurationMap configMap = mTestRule.getAllStaticInfo().get(
                        id).getCharacteristics().get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                // Skip cameras that cannot produce every requested format.
                boolean formatsSupported = true;
                for (int format : formats) {
                    if (!configMap.isOutputSupportedFor(format)) {
                        Log.i(TAG, "Camera " + id + " does not support output format: " + format +
                                " skipping");
                        formatsSupported = false;
                        break;
                    }
                }
                if (!formatsSupported) {
                    continue;
                }

                mTestRule.openDevice(id);

                boolean partialsExpected = mTestRule.getStaticInfo().getPartialResultCount() > 1;
                long startTimeMs;
                boolean isPartialTimingValid = partialsExpected;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageListener[] imageListeners = new SimpleImageListener[formats.length];
                    Size[] imageSizes = new Size[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        // Performance-class runs bound the capture size to 1080p; otherwise the
                        // largest available size for the format is used (index 0 of sorted list).
                        Size sizeBound = mTestRule.isPerfClassTest() ? new Size(1920, 1080) : null;
                        imageSizes[j] = CameraTestUtils.getSortedSizesForFormat(
                                id,
                                mTestRule.getCameraManager(),
                                formats[j],
                                sizeBound).get(0);
                        imageListeners[j] = new SimpleImageListener();
                    }

                    readers = prepareStillCaptureAndStartPreview(id, previewBuilder, captureBuilder,
                            mTestRule.getOrderedPreviewSizes().get(0), imageSizes, formats,
                            previewResultListener, NUM_MAX_IMAGES, imageListeners, enablePostview);

                    if (addPreviewDelay) {
                        // Let preview (and presumably 3A) settle before the still capture —
                        // TODO confirm the intent of this fixed 500ms delay.
                        Thread.sleep(500);
                    }

                    // Capture an image and get image data
                    startTimeMs = SystemClock.elapsedRealtime();
                    CaptureRequest request = captureBuilder.build();
                    mTestRule.getCameraSession().capture(
                            request, captureResultListener, mTestRule.getHandler());

                    Pair<CaptureResult, Long> partialResultNTime = null;
                    if (partialsExpected) {
                        partialResultNTime = captureResultListener.getPartialResultNTimeForRequest(
                                request, NUM_RESULTS_WAIT);
                        // Even if maxPartials > 1, may not see partials for some devices
                        if (partialResultNTime == null) {
                            partialsExpected = false;
                            isPartialTimingValid = false;
                        }
                    }
                    Pair<CaptureResult, Long> captureResultNTime =
                            captureResultListener.getCaptureResultNTimeForRequest(
                                    request, NUM_RESULTS_WAIT);

                    double [] imageTimes = new double[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        imageListeners[j].waitForImageAvailable(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                        imageTimes[j] = imageListeners[j].getTimeReceivedImage();
                    }

                    // Capture latency is measured against the average arrival time across the
                    // requested formats.
                    captureTimes[i] = Stat.getAverage(imageTimes) - startTimeMs;
                    if (partialsExpected) {
                        getPartialTimes[i] = partialResultNTime.second - startTimeMs;
                        if (getPartialTimes[i] < 0) {
                            isPartialTimingValid = false;
                        }
                    }
                    getResultTimes[i] = captureResultNTime.second - startTimeMs;

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    blockingStopRepeating();

                    CameraTestUtils.closeImageReaders(readers);
                    readers = null;
                }
                String message = appendFormatDescription("camera_capture_latency",
                        formatDescription);
                mReportLog.addValues(message, captureTimes, ResultType.LOWER_BETTER, ResultUnit.MS);
                // If any of the partial results do not contain AE and AF state, then no report
                if (isPartialTimingValid) {
                    message = appendFormatDescription("camera_partial_result_latency",
                            formatDescription);
                    mReportLog.addValues(message, getPartialTimes, ResultType.LOWER_BETTER,
                            ResultUnit.MS);
                }
                message = appendFormatDescription("camera_capture_result_latency",
                        formatDescription);
                mReportLog.addValues(message, getResultTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
                avgCaptureTimes[counter] = Stat.getAverage(captureTimes);
            }
            finally {
                // Release readers/device even when an iteration above throws or continues.
                CameraTestUtils.closeImageReaders(readers);
                readers = null;
                mTestRule.closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = appendFormatDescription("test_single_capture_average",
                    formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            // In performance measurement mode, capture the buffer latency rather than result
            // latency.
            if (mTestRule.isPerfMeasure()) {
                String message = appendFormatDescription(
                        "camera_capture_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgCaptureTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            } else {
                String message = appendFormatDescription(
                        "camera_capture_result_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgResultTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            mReportLog.submit(mInstrumentation);
        }
    }
539 
540     /**
541      * Test multiple capture KPI for YUV_420_888 format: the average time duration
542      * between sending out image capture requests and receiving capture results.
543      * <p>
544      * It measures capture latency, which is the time between sending out the capture
545      * request and getting the full capture result, and the frame duration, which is the timestamp
546      * gap between results.
547      * </p>
548      */
549     @Test
testMultipleCapture()550     public void testMultipleCapture() throws Exception {
551         double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
552         double[] avgDurationMs = new double[mTestRule.getCameraIdsUnderTest().length];
553 
        // A simple CaptureSession StateCallback that counts onCaptureQueueEmpty callbacks and
        // lets the test block until one arrives.
        class MultipleCaptureStateCallback extends CameraCaptureSession.StateCallback {
            // Signaled whenever onCaptureQueueEmpty fires.
            private ConditionVariable captureQueueEmptyCond = new ConditionVariable();
            // Count of onCaptureQueueEmpty callbacks not yet consumed by
            // waitForCaptureQueueEmpty.
            private int captureQueueEmptied = 0;

            @Override
            public void onConfigured(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onCaptureQueueEmpty(CameraCaptureSession session) {
                captureQueueEmptied++;
                if (VERBOSE) {
                    Log.v(TAG, "onCaptureQueueEmpty received. captureQueueEmptied = "
                            + captureQueueEmptied);
                }

                captureQueueEmptyCond.open();
            }

            /* Wait for onCaptureQueueEmpty, return immediately if an onCaptureQueueEmpty was
             * already received, otherwise, wait for one to arrive. */
            public void waitForCaptureQueueEmpty(long timeout) {
                if (captureQueueEmptied > 0) {
                    captureQueueEmptied--;
                    return;
                }

                if (captureQueueEmptyCond.block(timeout)) {
                    captureQueueEmptyCond.close();
                    // NOTE(review): the pending count is reset after a successful wait, so any
                    // callbacks that raced in during the block are collapsed into this one
                    // wait — confirm that is the intended semantics.
                    captureQueueEmptied = 0;
                } else {
                    throw new TimeoutRuntimeException("Unable to receive onCaptureQueueEmpty after "
                            + timeout + "ms");
                }
            }
        }
597 
598         final MultipleCaptureStateCallback sessionListener = new MultipleCaptureStateCallback();
599 
600         int counter = 0;
601         for (String id : mTestRule.getCameraIdsUnderTest()) {
602             // Do NOT move these variables to outer scope
603             // They will be passed to DeviceReportLog and their references will be stored
604             String streamName = "test_multiple_capture";
605             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
606             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
607             long[] startTimes = new long[NUM_MAX_IMAGES];
608             double[] getResultTimes = new double[NUM_MAX_IMAGES];
609             double[] frameDurationMs = new double[NUM_MAX_IMAGES-1];
610             try {
611                 StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
612                 if (!staticMetadata.isColorOutputSupported()) {
613                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
614                     continue;
615                 }
616                 boolean useSessionKeys = isFpsRangeASessionKey(staticMetadata.getCharacteristics());
617 
618                 mTestRule.openDevice(id);
619                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
620 
621                     // setup builders and listeners
622                     CaptureRequest.Builder previewBuilder =
623                             mTestRule.getCamera().createCaptureRequest(
624                                     CameraDevice.TEMPLATE_PREVIEW);
625                     CaptureRequest.Builder captureBuilder =
626                             mTestRule.getCamera().createCaptureRequest(
627                                     CameraDevice.TEMPLATE_STILL_CAPTURE);
628                     SimpleCaptureCallback previewResultListener =
629                             new SimpleCaptureCallback();
630                     SimpleTimingResultListener captureResultListener =
631                             new SimpleTimingResultListener();
632                     SimpleImageReaderListener imageListener =
633                             new SimpleImageReaderListener(/*asyncMode*/true, NUM_MAX_IMAGES);
634 
635                     Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
636                             id, mTestRule.getCameraManager(),
637                             ImageFormat.YUV_420_888, /*bound*/null).get(0);
638                     // Find minimum frame duration for YUV_420_888
639                     StreamConfigurationMap config =
640                             mTestRule.getStaticInfo().getCharacteristics().get(
641                             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
642 
643                     final long minStillFrameDuration =
644                             config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, maxYuvSize);
645                     if (minStillFrameDuration > 0) {
646                         Range<Integer> targetRange =
647                                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
648                                         minStillFrameDuration, mTestRule.getStaticInfo());
649                         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
650                         captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
651                     }
652 
653                     prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
654                             mTestRule.getOrderedPreviewSizes().get(0), maxYuvSize,
655                             ImageFormat.YUV_420_888, previewResultListener,
656                             sessionListener, NUM_MAX_IMAGES, imageListener,
657                             useSessionKeys);
658 
659                     // Converge AE
660                     CameraTestUtils.waitForAeStable(previewResultListener,
661                             NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY, mTestRule.getStaticInfo(),
662                             WAIT_FOR_RESULT_TIMEOUT_MS, NUM_RESULTS_WAIT_TIMEOUT);
663 
664                     if (mTestRule.getStaticInfo().isAeLockSupported()) {
665                         // Lock AE if possible to improve stability
666                         previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
667                         mTestRule.getCameraSession().setRepeatingRequest(previewBuilder.build(),
668                                 previewResultListener, mTestRule.getHandler());
669                         if (mTestRule.getStaticInfo().isHardwareLevelAtLeastLimited()) {
670                             // Legacy mode doesn't output AE state
671                             CameraTestUtils.waitForResultValue(previewResultListener,
672                                     CaptureResult.CONTROL_AE_STATE,
673                                     CaptureResult.CONTROL_AE_STATE_LOCKED,
674                                     NUM_RESULTS_WAIT_TIMEOUT, WAIT_FOR_RESULT_TIMEOUT_MS);
675                         }
676                     }
677 
678                     // Capture NUM_MAX_IMAGES images based on onCaptureQueueEmpty callback
679                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
680 
681                         // Capture an image and get image data
682                         startTimes[j] = SystemClock.elapsedRealtime();
683                         CaptureRequest request = captureBuilder.build();
684                         mTestRule.getCameraSession().capture(
685                                 request, captureResultListener, mTestRule.getHandler());
686 
687                         // Wait for capture queue empty for the current request
688                         sessionListener.waitForCaptureQueueEmpty(
689                                 CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
690                     }
691 
692                     // Acquire the capture result time and frame duration
693                     long prevTimestamp = -1;
694                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
695                         Pair<CaptureResult, Long> captureResultNTime =
696                                 captureResultListener.getCaptureResultNTime(
697                                         CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
698 
699                         getResultTimes[j] +=
700                                 (double)(captureResultNTime.second - startTimes[j])/NUM_TEST_LOOPS;
701 
702                         // Collect inter-frame timestamp
703                         long timestamp = captureResultNTime.first.get(
704                                 CaptureResult.SENSOR_TIMESTAMP);
705                         if (prevTimestamp != -1) {
706                             frameDurationMs[j-1] +=
707                                     (double)(timestamp - prevTimestamp)/(
708                                             NUM_TEST_LOOPS * 1000000.0);
709                         }
710                         prevTimestamp = timestamp;
711                     }
712 
713                     // simulate real scenario (preview runs a bit)
714                     CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
715                             WAIT_FOR_RESULT_TIMEOUT_MS);
716 
717                     stopRepeating();
718                 }
719 
720                 for (int i = 0; i < getResultTimes.length; i++) {
721                     Log.v(TAG, "Camera " + id + " result time[" + i + "] is " +
722                             getResultTimes[i] + " ms");
723                 }
724                 for (int i = 0; i < NUM_MAX_IMAGES-1; i++) {
725                     Log.v(TAG, "Camera " + id + " frame duration time[" + i + "] is " +
726                             frameDurationMs[i] + " ms");
727                 }
728 
729                 mReportLog.addValues("camera_multiple_capture_result_latency", getResultTimes,
730                         ResultType.LOWER_BETTER, ResultUnit.MS);
731                 mReportLog.addValues("camera_multiple_capture_frame_duration", frameDurationMs,
732                         ResultType.LOWER_BETTER, ResultUnit.MS);
733 
734 
735                 avgResultTimes[counter] = Stat.getAverage(getResultTimes);
736                 avgDurationMs[counter] = Stat.getAverage(frameDurationMs);
737             }
738             finally {
739                 mTestRule.closeDefaultImageReader();
740                 mTestRule.closeDevice(id);
741                 closePreviewSurface();
742             }
743             counter++;
744             mReportLog.submit(mInstrumentation);
745         }
746 
747         // Result will not be reported in CTS report if no summary is printed.
748         if (mTestRule.getCameraIdsUnderTest().length != 0) {
749             String streamName = "test_multiple_capture_average";
750             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
751             mReportLog.setSummary("camera_multiple_capture_result_average_latency_for_all_cameras",
752                     Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
753             mReportLog.submit(mInstrumentation);
754             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
755             mReportLog.setSummary("camera_multiple_capture_frame_duration_average_for_all_cameras",
756                     Stat.getAverage(avgDurationMs), ResultType.LOWER_BETTER, ResultUnit.MS);
757             mReportLog.submit(mInstrumentation);
758         }
759     }
760 
761     /**
762      * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the time
763      * a reprocess request is issued to the time the reprocess image is returned.
764      */
765     @Test
testReprocessingLatency()766     public void testReprocessingLatency() throws Exception {
767         for (String id : mTestRule.getCameraIdsUnderTest()) {
768             for (int format : REPROCESS_FORMATS) {
769                 if (!isReprocessSupported(id, format)) {
770                     continue;
771                 }
772 
773                 try {
774                     mTestRule.openDevice(id);
775                     String streamName = "test_reprocessing_latency";
776                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
777                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
778                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
779                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
780                             /*highQuality*/false);
781                 } finally {
782                     closeReaderWriters();
783                     mTestRule.closeDevice(id);
784                     closePreviewSurface();
785                     mReportLog.submit(mInstrumentation);
786                 }
787             }
788         }
789     }
790 
791     /**
792      * Test reprocessing throughput with default NR and edge options,
793      * i.e., how many frames can be reprocessed during a given amount of time.
794      *
795      */
796     @Test
testReprocessingThroughput()797     public void testReprocessingThroughput() throws Exception {
798         for (String id : mTestRule.getCameraIdsUnderTest()) {
799             for (int format : REPROCESS_FORMATS) {
800                 if (!isReprocessSupported(id, format)) {
801                     continue;
802                 }
803 
804                 try {
805                     mTestRule.openDevice(id);
806                     String streamName = "test_reprocessing_throughput";
807                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
808                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
809                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
810                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
811                             /*highQuality*/false);
812                 } finally {
813                     closeReaderWriters();
814                     mTestRule.closeDevice(id);
815                     closePreviewSurface();
816                     mReportLog.submit(mInstrumentation);
817                 }
818             }
819         }
820     }
821 
822     /**
823      * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e., from the
824      * time a reprocess request is issued to the time the reprocess image is returned.
825      */
826     @Test
testHighQualityReprocessingLatency()827     public void testHighQualityReprocessingLatency() throws Exception {
828         for (String id : mTestRule.getCameraIdsUnderTest()) {
829             for (int format : REPROCESS_FORMATS) {
830                 if (!isReprocessSupported(id, format)) {
831                     continue;
832                 }
833 
834                 try {
835                     mTestRule.openDevice(id);
836                     String streamName = "test_high_quality_reprocessing_latency";
837                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
838                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
839                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
840                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
841                             /*requireHighQuality*/true);
842                 } finally {
843                     closeReaderWriters();
844                     mTestRule.closeDevice(id);
845                     closePreviewSurface();
846                     mReportLog.submit(mInstrumentation);
847                 }
848             }
849         }
850     }
851 
852     /**
853      * Test reprocessing throughput with high quality NR and edge options, i.e., how many frames can
854      * be reprocessed during a given amount of time.
855      *
856      */
857     @Test
testHighQualityReprocessingThroughput()858     public void testHighQualityReprocessingThroughput() throws Exception {
859         for (String id : mTestRule.getCameraIdsUnderTest()) {
860             for (int format : REPROCESS_FORMATS) {
861                 if (!isReprocessSupported(id, format)) {
862                     continue;
863                 }
864 
865                 try {
866                     mTestRule.openDevice(id);
867                     String streamName = "test_high_quality_reprocessing_throughput";
868                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
869                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
870                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
871                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
872                             /*requireHighQuality*/true);
873                 } finally {
874                     closeReaderWriters();
875                     mTestRule.closeDevice(id);
876                     closePreviewSurface();
877                     mReportLog.submit(mInstrumentation);
878                 }
879             }
880         }
881     }
882 
883     /**
884      * Testing reprocessing caused preview stall (frame drops)
885      */
886     @Test
testReprocessingCaptureStall()887     public void testReprocessingCaptureStall() throws Exception {
888         for (String id : mTestRule.getCameraIdsUnderTest()) {
889             for (int format : REPROCESS_FORMATS) {
890                 if (!isReprocessSupported(id, format)) {
891                     continue;
892                 }
893 
894                 try {
895                     mTestRule.openDevice(id);
896                     String streamName = "test_reprocessing_capture_stall";
897                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
898                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
899                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
900                     reprocessingCaptureStallTestByCamera(format);
901                 } finally {
902                     closeReaderWriters();
903                     mTestRule.closeDevice(id);
904                     closePreviewSurface();
905                     mReportLog.submit(mInstrumentation);
906                 }
907             }
908         }
909     }
910 
    // Direction of zoom: in or out
    private enum ZoomDirection {
        ZOOM_IN,   // zoom ratios applied from small to large
        ZOOM_OUT;  // zoom ratios applied from large to small
    }
916 
    // Range of zoom: >= 1.0x, <= 1.0x, or full range.
    private enum ZoomRange {
        RATIO_1_OR_LARGER,   // exercise [1.0x, maxZoomRatio]
        RATIO_1_OR_SMALLER,  // exercise [minZoomRatio, 1.0x]
        RATIO_FULL_RANGE;    // exercise [minZoomRatio, maxZoomRatio]
    }
923 
924     /**
925      * Testing Zoom settings override performance for zoom in from 1.0x
926      *
927      * The range of zoomRatio being tested is [1.0x, maxZoomRatio]
928      * The test is skipped if minZoomRatio == 1.0x.
929      */
930     @Test
testZoomSettingsOverrideLatencyInFrom1x()931     public void testZoomSettingsOverrideLatencyInFrom1x() throws Exception {
932         testZoomSettingsOverrideLatency("zoom_in_from_1x",
933                 ZoomDirection.ZOOM_IN, ZoomRange.RATIO_1_OR_LARGER,
934                 /*checkSmoothZoom*/ false);
935     }
936 
937     /**
938      * Testing Zoom settings override performance for zoom out to 1.0x
939      *
940      * The range of zoomRatio being tested is [maxZoomRatio, 1.0x]
941      * The test is skipped if minZoomRatio == 1.0x.
942      */
943     @Test
testZoomSettingsOverrideLatencyOutTo1x()944     public void testZoomSettingsOverrideLatencyOutTo1x() throws Exception {
945         testZoomSettingsOverrideLatency("zoom_out_to_1x",
946                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_1_OR_LARGER,
947                 /*checkSmoothZoom*/ false);
948     }
949 
950     /**
951      * Testing Zoom settings override performance for zoom out from 1.0x
952      *
953      * The range of zoomRatios being tested is [1.0x, minZoomRatio].
954      * The test is skipped if minZoomRatio == 1.0x.
955      */
956     @Test
testZoomSettingsOverrideLatencyOutFrom1x()957     public void testZoomSettingsOverrideLatencyOutFrom1x() throws Exception {
958         testZoomSettingsOverrideLatency("zoom_out_from_1x",
959                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_1_OR_SMALLER,
960                 /*checkSmoothZoom*/ false);
961     }
962 
963     /**
964      * Testing Zoom settings override performance for zoom in full range
965      *
966      * The range of zoomRatios being tested is [minZoomRatio, maxZoomRatio].
967      */
968     @Test
testZoomSettingsOverrideLatencyInWithUltraWide()969     public void testZoomSettingsOverrideLatencyInWithUltraWide() throws Exception {
970         testZoomSettingsOverrideLatency("zoom_in_from_ultrawide",
971                 ZoomDirection.ZOOM_IN, ZoomRange.RATIO_FULL_RANGE,
972                 /*checkSmoothZoom*/ true);
973     }
974 
975     /**
976      * Testing Zoom settings override performance for zoom out full range
977      *
978      * The range of zoomRatios being tested is [maxZoomRatio, minZoomRatio].
979      */
980     @Test
testZoomSettingsOverrideLatencyOutWithUltraWide()981     public void testZoomSettingsOverrideLatencyOutWithUltraWide() throws Exception {
982         testZoomSettingsOverrideLatency("zoom_out_to_ultrawide",
983                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_FULL_RANGE,
984                 /*checkSmoothZoom*/ true);
985     }
986 
987     /**
988      * Get zoom ratios to be tested for zoom settings override test
989      */
getZoomRatiosToTest(StaticMetadata staticMetadata, boolean checkSmoothZoomForV, ZoomDirection direction, ZoomRange range)990     private double[] getZoomRatiosToTest(StaticMetadata staticMetadata,
991             boolean checkSmoothZoomForV, ZoomDirection direction, ZoomRange range) {
992         Range<Float> zoomRatioRange = staticMetadata.getZoomRatioRangeChecked();
993         final float kSmoothZoomStep = 0.1f;
994         final float kMaxZoomRatio = 10.0f;
995         float startRatio = zoomRatioRange.getLower();
996         float endRatio = Math.min(zoomRatioRange.getUpper(), kMaxZoomRatio);
997 
998         if (range == ZoomRange.RATIO_1_OR_LARGER) {
999             startRatio = 1.0f;
1000         } else if (range == ZoomRange.RATIO_1_OR_SMALLER) {
1001             endRatio = 1.0f;
1002         }
1003 
1004         ArrayList<Double> zoomRatios = new ArrayList<>();
1005         if (!checkSmoothZoomForV) {
1006             // If not checking smooth zoom, equally divide zoom range into NUM_ZOOM_STEPS
1007             // equal pieces.
1008             for (int i = 0; i <= NUM_ZOOM_STEPS; i++) {
1009                 double ratio = startRatio + (endRatio - startRatio) * i / NUM_ZOOM_STEPS;
1010                 zoomRatios.add(roundAwayFrom1(ratio));
1011             }
1012         } else {
1013             // If checking smooth zoom:
1014             // 1. Divide zoom range logarithmically to align with user perception.
1015             // 2. Smaller steps to simulate pinch zoom better, and at the same time giving
1016             //    lens switch enough time.
1017             double stepLog = Math.log(1.0f + kSmoothZoomStep);
1018             // Add zoom-out ratios
1019             for (double logRatio = 0.0f; logRatio >= Math.log(startRatio);
1020                     logRatio -= stepLog) {
1021                 zoomRatios.addFirst(roundAwayFrom1(Math.exp(logRatio)));
1022             }
1023             // Add zoom-in ratios
1024             for (double logRatio = stepLog; logRatio <= Math.log(endRatio);
1025                     logRatio += stepLog) {
1026                 zoomRatios.add(roundAwayFrom1(Math.exp(logRatio)));
1027             }
1028         }
1029 
1030         if (direction == ZoomDirection.ZOOM_OUT) {
1031             Collections.reverse(zoomRatios);
1032         }
1033         return zoomRatios.stream().mapToDouble(d -> d).toArray();
1034     }
1035 
1036     /**
1037      * Round the given zoom ratio so that it is not equal to 1.0
1038      *
1039      * TODO: b/350076823: Stay away from 1.0x so that camera framework doesn't
1040      * move the effective zoom rate to SCALER_CROP_REGION.
1041      */
roundAwayFrom1(double zoomRatio)1042     private double roundAwayFrom1(double zoomRatio) {
1043         final double kZoomRatioAt1x = 1.01f;
1044         return zoomRatio == 1.0 ? kZoomRatioAt1x : zoomRatio;
1045     }
1046 
1047     /**
1048      * This test measures the zoom latency improvement for devices supporting zoom settings
1049      * override.
1050      */
testZoomSettingsOverrideLatency(String testCase, ZoomDirection direction, ZoomRange range, boolean checkSmoothZoom)1051     private void testZoomSettingsOverrideLatency(String testCase,
1052             ZoomDirection direction, ZoomRange range, boolean checkSmoothZoom) throws Exception {
1053         final float ZOOM_ERROR_MARGIN = 0.05f;
1054         final float ERROR_THRESH_FACTOR = 0.33f;
1055         final int ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES = 1;
1056         final int MAX_IMPROVEMENT_VARIATION = 2;
1057         final boolean atLeastV =
1058                 PropertyUtil.getFirstApiLevel() > Build.VERSION_CODES.UPSIDE_DOWN_CAKE;
1059         final boolean checkSmoothZoomForV = checkSmoothZoom && atLeastV;
1060 
1061         for (String id : mTestRule.getCameraIdsUnderTest()) {
1062             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1063             CameraCharacteristics ch = staticMetadata.getCharacteristics();
1064 
1065             if (!staticMetadata.isColorOutputSupported()) {
1066                 continue;
1067             }
1068 
1069             if (!staticMetadata.isZoomSettingsOverrideSupported()) {
1070                 continue;
1071             }
1072 
1073             Range<Float> zoomRatioRange = staticMetadata.getZoomRatioRangeChecked();
1074             float minZoomRatio = zoomRatioRange.getLower();
1075             if (minZoomRatio >= 1.0f && (range != ZoomRange.RATIO_FULL_RANGE)) {
1076                 // Skip if the tests are overlapping with the full range tests.
1077                 continue;
1078             }
1079 
1080             // Figure out zoom ratios to test
1081             double[] ratiosToTest = getZoomRatiosToTest(staticMetadata,
1082                     checkSmoothZoomForV, direction, range);
1083             int numZoomSteps = ratiosToTest.length;
1084             int[] overrideImprovements = new int[numZoomSteps - 1];
1085 
1086             Log.v(TAG, "Camera " + id + " zoom settings: " + Arrays.toString(ratiosToTest));
1087             String streamName = "test_camera_zoom_override_latency";
1088             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
1089             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
1090             mReportLog.addValue("zoom_test_case", testCase, ResultType.NEUTRAL, ResultUnit.NONE);
1091 
1092             try {
1093                 mTestRule.openDevice(id);
1094                 mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
1095                 updatePreviewSurface(mPreviewSize);
1096 
1097                 // Start viewfinder with settings override set and the starting zoom ratio,
1098                 // and wait for some number of frames.
1099                 CaptureRequest.Builder previewBuilder = configurePreviewOutputs(id);
1100                 previewBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
1101                         CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
1102                 previewBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE,
1103                         CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM);
1104                 float startZoomRatio = (float) ratiosToTest[0];
1105                 previewBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, startZoomRatio);
1106                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1107                 int sequenceId = mTestRule.getCameraSession().setRepeatingRequest(
1108                         previewBuilder.build(), resultListener, mTestRule.getHandler());
1109                 CaptureResult result = CameraTestUtils.waitForNumResults(
1110                         resultListener, NUM_RESULTS_WAIT, WAIT_FOR_RESULT_TIMEOUT_MS);
1111 
1112                 float previousRatio = startZoomRatio;
1113                 for (int j = 0; j < numZoomSteps - 1; j++) {
1114                     float zoomFactor = (float) ratiosToTest[j + 1];
1115                     // The error margin needs to be adjusted based on the zoom step size.
1116                     // We take the min of ZOOM_ERROR_MARGIN and 1/3 of zoom ratio step.
1117                     float zoomErrorMargin = Math.min(ZOOM_ERROR_MARGIN,
1118                             (float) Math.abs(zoomFactor - previousRatio) * ERROR_THRESH_FACTOR);
1119                     previewBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor);
1120                     int newSequenceId = mTestRule.getCameraSession().setRepeatingRequest(
1121                             previewBuilder.build(), resultListener, mTestRule.getHandler());
1122                     long lastFrameNumberForRequest =
1123                             resultListener.getCaptureSequenceLastFrameNumber(sequenceId,
1124                                     WAIT_FOR_RESULT_TIMEOUT_MS);
1125 
1126                     int improvement = 0;
1127                     long frameNumber = -1;
1128                     Log.v(TAG, "LastFrameNumber for sequence " + sequenceId + ": "
1129                             + lastFrameNumberForRequest);
1130                     while (frameNumber < lastFrameNumberForRequest + 1) {
1131                         TotalCaptureResult zoomResult = resultListener.getTotalCaptureResult(
1132                                 WAIT_FOR_RESULT_TIMEOUT_MS);
1133                         frameNumber = zoomResult.getFrameNumber();
1134                         float resultZoomFactor = zoomResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
1135 
1136                         assertTrue(String.format("Zoom ratio should monotonically increase/decrease"
1137                                 + " or stay the same (previous = %f, current = %f", previousRatio,
1138                                 resultZoomFactor),
1139                                 Math.abs(previousRatio - resultZoomFactor) < zoomErrorMargin
1140                                 || (direction == ZoomDirection.ZOOM_IN
1141                                         && previousRatio < resultZoomFactor)
1142                                 || (direction == ZoomDirection.ZOOM_OUT
1143                                         && previousRatio > resultZoomFactor));
1144 
1145                         if (Math.abs(resultZoomFactor - zoomFactor) < zoomErrorMargin
1146                                 && improvement == 0) {
1147                             improvement = (int) (lastFrameNumberForRequest + 1 - frameNumber);
1148                         }
1149                         Log.v(TAG, "frameNumber " + frameNumber + " zoom: " + resultZoomFactor
1150                                 + " improvement: " + improvement);
1151                         previousRatio = resultZoomFactor;
1152                     }
1153 
1154                     // For firstApiLevel < V, zoom in must have at least 1 frame latency
1155                     // improvement. For firstApiLevel >= V, both zoom in and out must have
1156                     // at least 1 frame latency improvement.
1157                     if ((checkSmoothZoomForV || (range == ZoomRange.RATIO_1_OR_LARGER
1158                             && direction == ZoomDirection.ZOOM_IN))
1159                             && staticMetadata.isPerFrameControlSupported()) {
1160                         mTestRule.getCollector().expectTrue(
1161                                 "Zoom-in latency improvement (" + improvement
1162                                 + ") must be at least " + ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES,
1163                                 improvement >= ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES);
1164                     }
1165                     overrideImprovements[j] = improvement;
1166 
1167                     sequenceId = newSequenceId;
1168                 }
1169 
1170                 int minImprovement = Arrays.stream(overrideImprovements).min().getAsInt();
1171                 int maxImprovement = Arrays.stream(overrideImprovements).max().getAsInt();
1172                 int variation = maxImprovement - minImprovement;
1173                 // To check smooth zoom for V, the latency improvement must not introduce
1174                 // extra pipeline delay variation.
1175                 int maxVariation = checkSmoothZoomForV ? 0 : MAX_IMPROVEMENT_VARIATION;
1176                 assertTrue(
1177                         String.format("Zoom latency improvement variation %d must not exceed %d",
1178                                 variation, MAX_IMPROVEMENT_VARIATION), variation <= maxVariation);
1179 
1180                 mReportLog.addValues("Camera zoom ratios", ratiosToTest, ResultType.NEUTRAL,
1181                         ResultUnit.NONE);
1182                 mReportLog.addValues("Latency improvements", overrideImprovements,
1183                         ResultType.HIGHER_BETTER, ResultUnit.FRAMES);
1184             } finally {
1185                 mTestRule.closeDefaultImageReader();
1186                 mTestRule.closeDevice(id);
1187                 closePreviewSurface();
1188             }
1189             mReportLog.submit(mInstrumentation);
1190 
1191             if (VERBOSE) {
1192                 Log.v(TAG, "Camera " + id + " zoom settings: " + Arrays.toString(ratiosToTest));
1193                 Log.v(TAG, "Camera " + id + " zoom settings override latency improvements "
1194                         + "(in frames): " + Arrays.toString(overrideImprovements));
1195             }
1196         }
1197     }
1198 
1199     /**
1200      * Testing SurfaceView jitter reduction performance
1201      *
1202      * Because the application doesn't have access to SurfaceView frames,
1203      * we use an ImageReader with COMPOSER_OVERLAY usage.
1204      */
1205     @Test
1206     public void testSurfaceViewJitterReduction() throws Exception {
1207         String cameraId = null;
1208         Range<Integer>[] aeFpsRanges = null;
1209         for (String id : mTestRule.getCameraIdsUnderTest()) {
1210             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1211             if (staticMetadata.isColorOutputSupported()) {
1212                 cameraId = id;
1213                 aeFpsRanges = staticMetadata.getAeAvailableTargetFpsRangesChecked();
1214                 // Because jitter reduction is a framework feature and not camera specific,
1215                 // we only test for 1 camera Id.
1216                 break;
1217             }
1218         }
1219         if (cameraId == null) {
1220             Log.i(TAG, "No camera supports color outputs, skipping");
1221             return;
1222         }
1223 
1224         try {
1225             mTestRule.openDevice(cameraId);
1226 
1227             for (Range<Integer> fpsRange : aeFpsRanges) {
1228                 if (Objects.equals(fpsRange.getLower(), fpsRange.getUpper())) {
1229                     testPreviewJitterForFpsRange(cameraId,
1230                             HardwareBuffer.USAGE_COMPOSER_OVERLAY,
1231                             /*reduceJitter*/false, fpsRange);
1232 
1233                     testPreviewJitterForFpsRange(cameraId,
1234                             HardwareBuffer.USAGE_COMPOSER_OVERLAY,
1235                             /*reduceJitter*/true, fpsRange);
1236                 }
1237             }
1238         } finally {
1239             mTestRule.closeDevice(cameraId);
1240         }
1241     }
1242 
1243     /**
1244      * Testing SurfaceTexture jitter reduction performance
1245      */
1246     @Test
1247     public void testSurfaceTextureJitterReduction() throws Exception {
1248         String cameraId = null;
1249         Range<Integer>[] aeFpsRanges = null;
1250         for (String id : mTestRule.getCameraIdsUnderTest()) {
1251             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1252             if (staticMetadata.isColorOutputSupported()) {
1253                 cameraId = id;
1254                 aeFpsRanges = staticMetadata.getAeAvailableTargetFpsRangesChecked();
1255                 // Because jitter reduction is a framework feature and not camera specific,
1256                 // we only test for 1 camera Id.
1257                 break;
1258             }
1259         }
1260         if (cameraId == null) {
1261             Log.i(TAG, "No camera supports color outputs, skipping");
1262             return;
1263         }
1264 
1265         try {
1266             mTestRule.openDevice(cameraId);
1267 
1268             for (Range<Integer> fpsRange : aeFpsRanges) {
1269                 if (Objects.equals(fpsRange.getLower(), fpsRange.getUpper())) {
1270                     testPreviewJitterForFpsRange(cameraId,
1271                             HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
1272                             /*reduceJitter*/false, fpsRange);
1273                     testPreviewJitterForFpsRange(cameraId,
1274                             HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
1275                             /*reduceJitter*/true, fpsRange);
1276                 }
1277             }
1278         } finally {
1279             mTestRule.closeDevice(cameraId);
1280         }
1281     }
1282 
1283     private void testPreviewJitterForFpsRange(String cameraId, long usage,
1284             boolean reduceJitter, Range<Integer> fpsRange) throws Exception {
1285         try {
1286             assertTrue("usage must be COMPOSER_OVERLAY/GPU_SAMPLED_IMAGE, but is " + usage,
1287                     usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY
1288                     || usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE);
1289             String streamName = "test_camera_preview_jitter_";
1290             if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1291                 streamName += "surface_view";
1292             } else {
1293                 streamName += "surface_texture";
1294             }
1295             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
1296             mReportLog.addValue("camera_id", cameraId, ResultType.NEUTRAL, ResultUnit.NONE);
1297 
1298             // Display refresh rate while camera is active. Note that the default display's
1299             // getRefreshRate() isn't reflecting the real refresh rate. Hardcode it for now.
1300             float refreshRate = 60.0f;
1301             float numRefreshesPerDuration = refreshRate / fpsRange.getLower();
1302             long refreshInterval = (long) (1000000000L / refreshRate);
1303 
1304             Long frameDuration = (long) (1e9 / fpsRange.getLower());
1305             initializeImageReader(cameraId, ImageFormat.PRIVATE,
1306                     frameDuration, usage);
1307 
1308             CameraCharacteristics ch =
1309                     mTestRule.getCameraManager().getCameraCharacteristics(cameraId);
1310             Integer timestampSource = ch.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
1311             assertNotNull("Timestamp source must not be null", timestampSource);
1312 
1313             boolean timestampIsRealtime = false;
1314             if (timestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME
1315                     && (!reduceJitter || usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)) {
1316                 timestampIsRealtime = true;
1317             }
1318             SimpleTimestampListener imageListener =
1319                     new SimpleTimestampListener(timestampIsRealtime);
1320             mTestRule.getReader().setOnImageAvailableListener(
1321                     imageListener, mTestRule.getHandler());
1322 
1323             CaptureRequest.Builder previewBuilder = mTestRule.getCamera().createCaptureRequest(
1324                     CameraDevice.TEMPLATE_PREVIEW);
1325             previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
1326             previewBuilder.addTarget(mTestRule.getReaderSurface());
1327             CaptureRequest previewRequest = previewBuilder.build();
1328 
1329             List<OutputConfiguration> outputConfigs = new ArrayList<>();
1330             OutputConfiguration config = new OutputConfiguration(mTestRule.getReaderSurface());
1331             if (!reduceJitter) {
1332                 config.setTimestampBase(OutputConfiguration.TIMESTAMP_BASE_SENSOR);
1333             }
1334             outputConfigs.add(config);
1335 
1336             boolean useSessionKeys = isFpsRangeASessionKey(ch);
1337             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1338             configureAndSetCameraSessionWithConfigs(outputConfigs, useSessionKeys, previewRequest);
1339 
1340             // Start preview and run for 6 seconds
1341             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1342             mTestRule.getCameraSession().setRepeatingRequest(
1343                     previewRequest, resultListener, mTestRule.getHandler());
1344 
1345             Thread.sleep(6000);
1346 
1347             blockingStopRepeating();
1348 
1349             // Let N be expected number of VSYNCs between frames
1350             //
1351             // Number of frames ahead of expected VSYNC: 0.5 * VSYNC < frame duration <=
1352             // (N - 0.5) * VSYNC
1353             long framesAheadCount = 0;
1354             // Number of frames delayed past the expected VSYNC: frame duration >= (N + 0.5) * VSYNC
1355             long framesDelayedCount = 0;
1356             // Number of frames dropped: Fell into one single VSYNC
1357             long framesDroppedCount = 0;
1358             // The number of frame intervals in total
1359             long intervalCount = imageListener.getTimestampCount() - 1;
1360             assertTrue("Number of timestamp intervals must be at least 1, but is " + intervalCount,
1361                     intervalCount >= 1);
1362             // The sum of delays in ms for all frames captured
1363             double framesDelayInMs = 0;
1364 
1365             SimpleTimestampListener.TimestampHolder timestamp1 =
1366                     imageListener.getNextTimestampHolder();
1367             if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1368                 framesDelayInMs =
1369                         Math.max(0, timestamp1.mTimestamp - timestamp1.mDeliveryTime) / 1000000;
1370             } else {
1371                 framesDelayInMs =
1372                         (timestamp1.mDeliveryTime - timestamp1.mTimestamp) / 1000000;
1373             }
1374             for (long i = 0; i < intervalCount; i++) {
1375                 SimpleTimestampListener.TimestampHolder timestamp2 =
1376                         imageListener.getNextTimestampHolder();
1377                 // The listener uses the image timestamp if it's in the future. Otherwise, use
1378                 // the current system time (image delivery time).
1379                 long presentTime2 = Math.max(timestamp2.mDeliveryTime, timestamp2.mTimestamp);
1380                 long presentTime1 = Math.max(timestamp1.mDeliveryTime, timestamp1.mTimestamp);
1381                 long frameInterval = presentTime2 - presentTime1;
1382                 if (frameInterval <= refreshInterval / 2) {
1383                     framesDroppedCount++;
1384                 } else if (frameInterval <= refreshInterval * (numRefreshesPerDuration - 0.5f)) {
1385                     framesAheadCount++;
1386                 } else if (frameInterval >=  refreshInterval * (numRefreshesPerDuration + 0.5f)) {
1387                     framesDelayedCount++;
1388                 }
1389 
1390                 if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1391                     framesDelayInMs +=
1392                             Math.max(0, timestamp2.mTimestamp - timestamp2.mDeliveryTime) / 1000000;
1393                 } else {
1394                     framesDelayInMs +=
1395                             (timestamp1.mDeliveryTime - timestamp1.mTimestamp) / 1000000;
1396                 }
1397                 timestamp1 = timestamp2;
1398             }
1399 
1400             mReportLog.addValue("reduce_jitter", reduceJitter, ResultType.NEUTRAL,
1401                     ResultUnit.NONE);
1402             mReportLog.addValue("camera_configured_frame_rate", fpsRange.getLower(),
1403                     ResultType.NEUTRAL, ResultUnit.NONE);
1404             mReportLog.addValue("camera_preview_frame_dropped_rate",
1405                     1.0f * framesDroppedCount / intervalCount, ResultType.LOWER_BETTER,
1406                     ResultUnit.NONE);
1407             mReportLog.addValue("camera_preview_frame_ahead_rate",
1408                     1.0f * framesAheadCount / intervalCount, ResultType.LOWER_BETTER,
1409                     ResultUnit.NONE);
1410             mReportLog.addValue("camera_preview_frame_delayed_rate",
1411                     1.0f * framesDelayedCount / intervalCount,
1412                     ResultType.LOWER_BETTER, ResultUnit.NONE);
1413             mReportLog.addValue("camera_preview_frame_latency_ms",
1414                     framesDelayInMs / (intervalCount + 1), ResultType.LOWER_BETTER,
1415                     ResultUnit.MS);
1416 
1417             if (VERBOSE) {
1418                 Log.v(TAG, "Camera " + cameraId + " frame rate: " + fpsRange.getLower()
1419                         + ", dropped rate: " + (1.0f * framesDroppedCount / intervalCount)
1420                         + ", ahead rate: " + (1.0f * framesAheadCount / intervalCount)
1421                         + ", delayed rate: " + (1.0f * framesDelayedCount / intervalCount)
1422                         + ", latency in ms: " + (framesDelayInMs / (intervalCount + 1)));
1423             }
1424         } finally {
1425             mTestRule.closeDefaultImageReader();
1426             mReportLog.submit(mInstrumentation);
1427         }
1428     }
1429 
    /**
     * Verify that reprocess captures submitted during ZSL streaming do not stall the
     * repeating stream beyond the device's advertised max capture stall.
     *
     * <p>Queues NUM_REPROCESS_TESTED reprocess requests, then for each one measures the
     * largest sensor-timestamp gap over the following MAX_REPROCESS_RETURN_FRAME_COUNT
     * preview results and asserts it stays within
     * (maxCaptureStall + 1) * averageFrameDuration * (1 + REPROCESS_STALL_MARGIN).</p>
     *
     * @param reprocessInputFormat reprocess input format (ImageFormat.PRIVATE for opaque
     *        reprocessing, otherwise treated as YUV reprocessing in the report)
     */
    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocessing request
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            // Pair each ZSL image with the capture result matching its timestamp,
            // then queue it as reprocess input up front.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }

        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mTestRule.getCameraSession().capture(
                    reprocessRequest, reprocessResultListener, mTestRule.getHandler());
            // Wait for reprocess output jpeg and result come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                            + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need look longer time, as the stutter could happen after the reprocessing
            // output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Time stamp should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Get the number of candidate results, calculate the average frame duration
            // and max timestamp gap.
            // Sorting puts the largest gap at the end of the array.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        blockingStopRepeating();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL, ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x average frame
        // duration * (1 + error margin).
        int maxCaptureStallFrames = mTestRule.getStaticInfo().getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than " + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
1534 
    /**
     * Measure reprocess capture latency and report it to the device report log.
     *
     * @param reprocessInputFormat reprocess input format (ImageFormat.PRIVATE for opaque
     *        reprocessing, otherwise reported as YUV reprocessing)
     * @param asyncMode if true, submit all reprocess requests up front and measure
     *        per-image output throughput; if false, submit requests one at a time and
     *        measure shot-to-shot latency
     * @param requireHighQuality if true, set HIGH_QUALITY noise reduction and edge modes
     *        on every reprocess request
     */
    private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode,
            boolean requireHighQuality)
            throws Exception {
        // Prepare the reprocessing capture
        prepareReprocessCapture(reprocessInputFormat);

        // Start ZSL streaming
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
        double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
        long startTimeMs;
        for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
            // Pair each ZSL image with the capture result matching its timestamp.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
            if (requireHighQuality) {
                // Reprocessing should support high quality for NR and edge modes.
                reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                        CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
                reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                        CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            }
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
        }

        if (asyncMode) {
            // async capture: issue all the reprocess requests as quick as possible, then
            // check the throughput of the output jpegs.
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                // Could be slow for YUV reprocessing, do it in advance.
                mWriter.queueInputImage(inputImages[i]);
            }

            // Submit the requests
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
            }

            // Get images; each latency sample is the gap between consecutive outputs.
            startTimeMs = SystemClock.elapsedRealtime();
            Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
                startTimeMs = SystemClock.elapsedRealtime();
            }
            for (Image i : jpegImages) {
                i.close();
            }
        } else {
            // sync capture: issue reprocess request one by one, only submit next one when
            // the previous capture image is returned. This is to test the back to back capture
            // performance.
            Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                startTimeMs = SystemClock.elapsedRealtime();
                mWriter.queueInputImage(inputImages[i]);
                mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
                jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
            }
            for (Image i : jpegImages) {
                i.close();
            }
        }

        blockingStopRepeating();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }

        // Report the performance data
        String captureMsg;
        if (asyncMode) {
            captureMsg = "capture latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
        } else {
            captureMsg = "shot to shot latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
        }
    }
1644 
1645     /**
1646      * Start preview and ZSL streaming
1647      */
1648     private void startZslStreaming() throws Exception {
1649         CaptureRequest.Builder zslBuilder =
1650                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
1651         zslBuilder.addTarget(mPreviewSurface);
1652         zslBuilder.addTarget(mCameraZslReader.getSurface());
1653         mTestRule.getCameraSession().setRepeatingRequest(
1654                 zslBuilder.build(), mZslResultListener, mTestRule.getHandler());
1655     }
1656 
1657     /**
1658      * Wait for a certain number of frames, the images and results will be drained from the
1659      * listeners to make sure that next reprocessing can get matched results and images.
1660      *
1661      * @param numFrameWait The number of frames to wait before return, 0 means that
1662      *      this call returns immediately after streaming on.
1663      */
1664     private void waitForFrames(int numFrameWait) throws Exception {
1665         if (numFrameWait < 0) {
1666             throw new IllegalArgumentException("numFrameWait " + numFrameWait +
1667                     " should be non-negative");
1668         }
1669 
1670         for (int i = 0; i < numFrameWait; i++) {
1671             mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
1672         }
1673     }
1674 
    /**
     * Tear down the reprocess-related readers and writer created by
     * prepareReprocessCapture: drain pending images before closing each reader,
     * then close the reprocess input writer. Fields are nulled so a later setup
     * starts from a clean state.
     */
    private void closeReaderWriters() {
        // Drain before close so no acquired images are leaked.
        mCameraZslImageListener.drain();
        CameraTestUtils.closeImageReader(mCameraZslReader);
        mCameraZslReader = null;
        mJpegListener.drain();
        CameraTestUtils.closeImageReader(mJpegReader);
        mJpegReader = null;
        CameraTestUtils.closeImageWriter(mWriter);
        mWriter = null;
    }
1685 
    /**
     * Set up a reprocessable capture session: preview + ZSL reader + JPEG reader
     * outputs, an input stream sized to the max supported input size, and an
     * ImageWriter feeding the session's input surface.
     *
     * @param inputFormat the reprocess input image format
     * @throws CameraAccessException if querying/configuring the camera fails
     */
    private void prepareReprocessCapture(int inputFormat)
            throws CameraAccessException {
        // 1. Find the right preview and capture sizes.
        Size maxPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
        Size[] supportedInputSizes =
                mTestRule.getStaticInfo().getAvailableSizesForFormatChecked(inputFormat,
                        StaticMetadata.StreamDirection.Input);
        Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
        Size maxJpegSize = mTestRule.getOrderedStillSizes().get(0);
        updatePreviewSurface(maxPreviewSize);
        mZslResultListener = new SimpleCaptureCallback();

        // 2. Create camera output ImageReaders.
        // YUV/Opaque output, camera should support output with input size/format
        // Async mode with a queue depth leaving room for the queued reprocess inputs.
        mCameraZslImageListener = new SimpleImageReaderListener(
                /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
        mCameraZslReader = CameraTestUtils.makeImageReader(
                maxInputSize, inputFormat, MAX_ZSL_IMAGES,
                mCameraZslImageListener, mTestRule.getHandler());
        // Jpeg reprocess output
        mJpegListener = new SimpleImageReaderListener();
        mJpegReader = CameraTestUtils.makeImageReader(
                maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES,
                mJpegListener, mTestRule.getHandler());

        // create camera reprocess session
        List<Surface> outSurfaces = new ArrayList<Surface>();
        outSurfaces.add(mPreviewSurface);
        outSurfaces.add(mCameraZslReader.getSurface());
        outSurfaces.add(mJpegReader.getSurface());
        InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
                maxInputSize.getHeight(), inputFormat);
        mTestRule.setCameraSessionListener(new BlockingSessionCallback());
        mTestRule.setCameraSession(CameraTestUtils.configureReprocessableCameraSession(
                mTestRule.getCamera(), inputConfig, outSurfaces,
                mTestRule.getCameraSessionListener(), mTestRule.getHandler()));

        // 3. Create ImageWriter for input
        mWriter = CameraTestUtils.makeImageWriter(
                mTestRule.getCameraSession().getInputSurface(), MAX_INPUT_IMAGES,
                /*listener*/null, /*handler*/null);
    }
1728 
1729     /**
1730      * Stop repeating requests for current camera and waiting for it to go back to idle, resulting
1731      * in an idle device.
1732      */
1733     private void blockingStopRepeating() throws Exception {
1734         stopRepeating();
1735         mTestRule.getCameraSessionListener().getStateWaiter().waitForState(
1736                 BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
1737     }
1738 
    /**
     * Start a repeating preview request and block until the first image is available.
     *
     * @param id the camera id under test (not used in this body; presumably kept for
     *        signature parity with sibling helpers — TODO confirm)
     * @param listener capture callback for the repeating request
     * @param previewRequest the preview capture request to repeat
     * @param imageListener listener used to wait for the first available image
     */
    private void blockingStartPreview(String id, CaptureCallback listener,
            CaptureRequest previewRequest, SimpleImageListener imageListener)
            throws Exception {
        mTestRule.getCameraSession().setRepeatingRequest(
                previewRequest, listener, mTestRule.getHandler());
        imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    }
1746 
1747     /**
1748      * Setup still capture configuration and start preview.
1749      *
1750      * @param id The camera id under test
1751      * @param previewBuilder The capture request builder to be used for preview
1752      * @param stillBuilder The capture request builder to be used for still capture
1753      * @param previewSz Preview size
1754      * @param captureSizes Still capture sizes
1755      * @param formats The single capture image formats
1756      * @param resultListener Capture result listener
1757      * @param maxNumImages The max number of images set to the image reader
1758      * @param imageListeners The single capture capture image listeners
1759      * @param enablePostView Enable post view as part of the still capture request
1760      */
1761     private ImageReader[] prepareStillCaptureAndStartPreview(String id,
1762             CaptureRequest.Builder previewBuilder, CaptureRequest.Builder stillBuilder,
1763             Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener,
1764             int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners,
1765             boolean enablePostView)
1766             throws Exception {
1767 
1768         if ((captureSizes == null) || (formats == null) || (imageListeners == null) &&
1769                 (captureSizes.length != formats.length) ||
1770                 (formats.length != imageListeners.length)) {
1771             throw new IllegalArgumentException("Invalid capture sizes/formats or image listeners!");
1772         }
1773 
1774         if (VERBOSE) {
1775             Log.v(TAG, String.format("Prepare still capture and preview (%s)",
1776                     previewSz.toString()));
1777         }
1778 
1779         // Update preview size.
1780         updatePreviewSurface(previewSz);
1781 
1782         ImageReader[] readers = new ImageReader[captureSizes.length];
1783         List<Surface> outputSurfaces = new ArrayList<Surface>();
1784         outputSurfaces.add(mPreviewSurface);
1785         for (int i = 0; i < captureSizes.length; i++) {
1786             readers[i] = CameraTestUtils.makeImageReader(captureSizes[i], formats[i], maxNumImages,
1787                     imageListeners[i], mTestRule.getHandler());
1788             outputSurfaces.add(readers[i].getSurface());
1789         }
1790 
1791         // Configure the requests.
1792         previewBuilder.addTarget(mPreviewSurface);
1793         if (enablePostView)
1794             stillBuilder.addTarget(mPreviewSurface);
1795         for (int i = 0; i < readers.length; i++) {
1796             stillBuilder.addTarget(readers[i].getSurface());
1797         }
1798 
1799         // Update target fps based on the min frame duration of preview.
1800         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1801         StreamConfigurationMap config = ch.get(
1802                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1803         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS, config.getOutputMinFrameDuration(
1804                 SurfaceTexture.class, previewSz));
1805         Range<Integer> targetRange =
1806                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1807                 minFrameDuration, mTestRule.getStaticInfo());
1808         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1809         stillBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1810 
1811         CaptureRequest previewRequest = previewBuilder.build();
1812         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1813         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1814         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1815 
1816         // Start preview.
1817         mTestRule.getCameraSession().setRepeatingRequest(
1818                 previewRequest, resultListener, mTestRule.getHandler());
1819 
1820         return readers;
1821     }
1822 
1823     /**
1824      * Helper function to check if TARGET_FPS_RANGE is a session parameter
1825      */
1826     private boolean isFpsRangeASessionKey(CameraCharacteristics ch) {
1827         List<CaptureRequest.Key<?>> sessionKeys = ch.getAvailableSessionKeys();
1828         return sessionKeys != null &&
1829                 sessionKeys.contains(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
1830     }
1831 
1832     /**
1833      * Helper function to configure camera session using parameters provided.
1834      */
1835     private void configureAndSetCameraSession(List<Surface> surfaces,
1836             boolean useInitialRequest, CaptureRequest initialRequest)
1837             throws CameraAccessException {
1838         CameraCaptureSession cameraSession;
1839         if (useInitialRequest) {
1840             cameraSession = CameraTestUtils.configureCameraSessionWithParameters(
1841                 mTestRule.getCamera(), surfaces,
1842                 mTestRule.getCameraSessionListener(), mTestRule.getHandler(),
1843                 initialRequest);
1844         } else {
1845             cameraSession = CameraTestUtils.configureCameraSession(
1846                 mTestRule.getCamera(), surfaces,
1847                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1848         }
1849         mTestRule.setCameraSession(cameraSession);
1850     }
1851 
1852     /*
1853      * Helper function to configure camera session using parameters provided.
1854      */
1855     private void configureAndSetCameraSessionWithConfigs(List<OutputConfiguration> configs,
1856             boolean useInitialRequest, CaptureRequest initialRequest)
1857             throws CameraAccessException {
1858         CameraCaptureSession cameraSession;
1859         if (useInitialRequest) {
1860             cameraSession = CameraTestUtils.tryConfigureCameraSessionWithConfig(
1861                 mTestRule.getCamera(), configs, initialRequest,
1862                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1863         } else {
1864             cameraSession = CameraTestUtils.configureCameraSessionWithConfig(
1865                 mTestRule.getCamera(), configs,
1866                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1867         }
1868         mTestRule.setCameraSession(cameraSession);
1869     }
1870 
1871     /**
1872      * Setup single capture configuration and start preview.
1873      *
1874      * @param previewBuilder The capture request builder to be used for preview
1875      * @param stillBuilder The capture request builder to be used for still capture
1876      * @param previewSz Preview size
1877      * @param captureSz Still capture size
1878      * @param format The single capture image format
1879      * @param resultListener Capture result listener
1880      * @param sessionListener Session listener
1881      * @param maxNumImages The max number of images set to the image reader
1882      * @param imageListener The single capture capture image listener
1883      * @param useSessionKeys Create capture session using session keys from previewRequest
1884      */
1885     private void prepareCaptureAndStartPreview(CaptureRequest.Builder previewBuilder,
1886             CaptureRequest.Builder stillBuilder, Size previewSz, Size captureSz, int format,
1887             CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener,
1888             int maxNumImages, ImageReader.OnImageAvailableListener imageListener,
1889             boolean  useSessionKeys) throws Exception {
1890         if ((captureSz == null) || (imageListener == null)) {
1891             throw new IllegalArgumentException("Invalid capture size or image listener!");
1892         }
1893 
1894         if (VERBOSE) {
1895             Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
1896                     captureSz.toString(), previewSz.toString()));
1897         }
1898 
1899         // Update preview size.
1900         updatePreviewSurface(previewSz);
1901 
1902         // Create ImageReader.
1903         mTestRule.createDefaultImageReader(captureSz, format, maxNumImages, imageListener);
1904 
1905         // Configure output streams with preview and jpeg streams.
1906         List<Surface> outputSurfaces = new ArrayList<Surface>();
1907         outputSurfaces.add(mPreviewSurface);
1908         outputSurfaces.add(mTestRule.getReaderSurface());
1909         if (sessionListener == null) {
1910             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1911         } else {
1912             mTestRule.setCameraSessionListener(new BlockingSessionCallback(sessionListener));
1913         }
1914 
1915         // Configure the requests.
1916         previewBuilder.addTarget(mPreviewSurface);
1917         stillBuilder.addTarget(mPreviewSurface);
1918         stillBuilder.addTarget(mTestRule.getReaderSurface());
1919         CaptureRequest previewRequest = previewBuilder.build();
1920 
1921         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1922 
1923         // Start preview.
1924         mTestRule.getCameraSession().setRepeatingRequest(
1925                 previewRequest, resultListener, mTestRule.getHandler());
1926     }
1927 
1928     /**
1929      * Update the preview surface size.
1930      *
1931      * @param size The preview size to be updated.
1932      */
1933     private void updatePreviewSurface(Size size) {
1934         if ((mPreviewSurfaceTexture != null ) || (mPreviewSurface != null)) {
1935             closePreviewSurface();
1936         }
1937 
1938         mPreviewSurfaceTexture = new SurfaceTexture(/*random int*/ 1);
1939         mPreviewSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
1940         mPreviewSurface = new Surface(mPreviewSurfaceTexture);
1941     }
1942 
1943     /**
1944      * Release preview surface and corresponding surface texture.
1945      */
1946     private void closePreviewSurface() {
1947         if (mPreviewSurface != null) {
1948             mPreviewSurface.release();
1949             mPreviewSurface = null;
1950         }
1951 
1952         if (mPreviewSurfaceTexture != null) {
1953             mPreviewSurfaceTexture.release();
1954             mPreviewSurfaceTexture = null;
1955         }
1956     }
1957 
1958     private boolean isReprocessSupported(String cameraId, int format)
1959             throws CameraAccessException {
1960         if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
1961             throw new IllegalArgumentException(
1962                     "format " + format + " is not supported for reprocessing");
1963         }
1964 
1965         StaticMetadata info = new StaticMetadata(
1966                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId), CheckLevel.ASSERT,
1967                 /*collector*/ null);
1968         int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
1969         if (format == ImageFormat.PRIVATE) {
1970             cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
1971         }
1972         return info.isCapabilitySupported(cap);
1973     }
1974 
1975     /**
1976      * Stop the repeating requests of current camera.
1977      * Does _not_ wait for the device to go idle
1978      */
1979     private void stopRepeating() throws Exception {
1980         // Stop repeat, wait for captures to complete, and disconnect from surfaces
1981         if (mTestRule.getCameraSession() != null) {
1982             if (VERBOSE) Log.v(TAG, "Stopping preview");
1983             mTestRule.getCameraSession().stopRepeating();
1984         }
1985     }
1986 
1987     /**
1988      * Configure reader and preview outputs and wait until done.
1989      *
1990      * @return The preview capture request
1991      */
1992     private CaptureRequest configureReaderAndPreviewOutputs(
1993             String id, boolean isColorOutputSupported)
1994             throws Exception {
1995         if (mPreviewSurface == null || mTestRule.getReaderSurface() == null) {
1996             throw new IllegalStateException("preview and reader surface must be initilized first");
1997         }
1998 
1999         // Create previewBuilder
2000         CaptureRequest.Builder previewBuilder =
2001                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2002         if (isColorOutputSupported) {
2003             previewBuilder.addTarget(mPreviewSurface);
2004         }
2005         previewBuilder.addTarget(mTestRule.getReaderSurface());
2006 
2007 
2008         // Figure out constant target FPS range no larger than 30fps
2009         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
2010         StreamConfigurationMap config =
2011                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2012         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
2013                 config.getOutputMinFrameDuration(mImageReaderFormat, mPreviewSize));
2014 
2015         List<Surface> outputSurfaces = new ArrayList<>();
2016         outputSurfaces.add(mTestRule.getReaderSurface());
2017         if (isColorOutputSupported) {
2018             outputSurfaces.add(mPreviewSurface);
2019             minFrameDuration = Math.max(minFrameDuration,
2020                     config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
2021         }
2022         Range<Integer> targetRange =
2023                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
2024                         minFrameDuration, mTestRule.getStaticInfo());
2025         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
2026 
2027         // Create capture session
2028         boolean useSessionKeys = isFpsRangeASessionKey(ch);
2029         CaptureRequest previewRequest = previewBuilder.build();
2030         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
2031         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
2032 
2033         return previewRequest;
2034     }
2035 
2036     /**
2037      * Configure preview outputs and wait until done.
2038      *
2039      * @return The preview capture request builder
2040      */
2041     private CaptureRequest.Builder configurePreviewOutputs(String id)
2042             throws Exception {
2043         if (mPreviewSurface == null) {
2044             throw new IllegalStateException("preview surface must be initialized first");
2045         }
2046 
2047         // Create previewBuilder
2048         CaptureRequest.Builder previewBuilder =
2049                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2050         previewBuilder.addTarget(mPreviewSurface);
2051 
2052         // Figure out constant target FPS range no larger than 30fps
2053         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
2054         StreamConfigurationMap config =
2055                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2056         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
2057                 config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
2058 
2059         List<Surface> outputSurfaces = new ArrayList<>();
2060         outputSurfaces.add(mPreviewSurface);
2061         Range<Integer> targetRange =
2062                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
2063                         minFrameDuration, mTestRule.getStaticInfo());
2064         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
2065 
2066         // Create capture session
2067         boolean useSessionKeys = isFpsRangeASessionKey(ch);
2068         CaptureRequest previewRequest = previewBuilder.build();
2069         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
2070         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
2071 
2072         return previewBuilder;
2073     }
2074 
2075     /**
2076      * Initialize the ImageReader instance and preview surface.
2077      * @param cameraId The camera to be opened.
2078      * @param format The format used to create ImageReader instance.
2079      */
2080     private void initializeImageReader(String cameraId, int format) throws Exception {
2081         initializeImageReader(cameraId, format, null/*maxFrameDuration*/, 0/*usage*/);
2082     }
2083 
2084     /**
2085      * Initialize the ImageReader instance and preview surface.
2086      * @param cameraId The camera to be opened.
2087      * @param format The format used to create ImageReader instance.
2088      * @param frameDuration The min frame duration of the ImageReader cannot be larger than
2089      *                      frameDuration.
2090      * @param usage The usage of the ImageReader
2091      */
2092     private void initializeImageReader(String cameraId, int format, Long frameDuration, long usage)
2093             throws Exception {
2094         List<Size> boundedSizes = CameraTestUtils.getSortedSizesForFormat(
2095                 cameraId, mTestRule.getCameraManager(), format,
2096                 CameraTestUtils.getPreviewSizeBound(mTestRule.getWindowManager(),
2097                         CameraTestUtils.PREVIEW_SIZE_BOUND));
2098 
2099         // Remove the sizes not meeting the frame duration requirement.
2100         final float kFrameDurationTolerance = 0.01f;
2101         if (frameDuration != null) {
2102             StreamConfigurationMap configMap = mTestRule.getStaticInfo().getValueFromKeyNonNull(
2103                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2104             ListIterator<Size> iter = boundedSizes.listIterator();
2105             while (iter.hasNext()) {
2106                 long duration = configMap.getOutputMinFrameDuration(format, iter.next());
2107                 if (duration > frameDuration * (1 + kFrameDurationTolerance)) {
2108                     iter.remove();
2109                 }
2110             }
2111         }
2112 
2113         mTestRule.setOrderedPreviewSizes(boundedSizes);
2114         mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
2115         mImageReaderFormat = format;
2116         if (usage != 0) {
2117             mTestRule.createDefaultImageReader(
2118                     mPreviewSize, format, NUM_MAX_IMAGES, usage, /*listener*/null);
2119         } else {
2120             mTestRule.createDefaultImageReader(
2121                     mPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
2122         }
2123     }
2124 
2125     private void simpleOpenCamera(String cameraId) throws Exception {
2126         mTestRule.setCamera(CameraTestUtils.openCamera(
2127                 mTestRule.getCameraManager(), cameraId,
2128                 mTestRule.getCameraListener(), mTestRule.getHandler()));
2129         mTestRule.getCollector().setCameraId(cameraId);
2130         mTestRule.setStaticInfo(new StaticMetadata(
2131                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId),
2132                 CheckLevel.ASSERT, /*collector*/null));
2133     }
2134 
2135     /**
2136      * Simple image listener that can be used to time the availability of first image.
2137      *
2138      */
2139     private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
2140         private ConditionVariable imageAvailable = new ConditionVariable();
2141         private boolean imageReceived = false;
2142         private long mTimeReceivedImage = 0;
2143 
2144         @Override
2145         public void onImageAvailable(ImageReader reader) {
2146             Image image = null;
2147             if (!imageReceived) {
2148                 if (VERBOSE) {
2149                     Log.v(TAG, "First image arrives");
2150                 }
2151                 imageReceived = true;
2152                 mTimeReceivedImage = SystemClock.elapsedRealtime();
2153                 imageAvailable.open();
2154             }
2155             image = reader.acquireNextImage();
2156             if (image != null) {
2157                 image.close();
2158             }
2159         }
2160 
2161         /**
2162          * Wait for image available, return immediately if the image was already
2163          * received, otherwise wait until an image arrives.
2164          */
2165         public void waitForImageAvailable(long timeout) {
2166             if (imageReceived) {
2167                 imageReceived = false;
2168                 return;
2169             }
2170 
2171             if (imageAvailable.block(timeout)) {
2172                 imageAvailable.close();
2173                 imageReceived = true;
2174             } else {
2175                 throw new TimeoutRuntimeException("Unable to get the first image after "
2176                         + CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS + "ms");
2177             }
2178         }
2179 
2180         public long getTimeReceivedImage() {
2181             return mTimeReceivedImage;
2182         }
2183     }
2184 
2185     /**
2186      * Simple image listener that behaves like a SurfaceView.
2187      */
2188     private static class SimpleTimestampListener
2189             implements ImageReader.OnImageAvailableListener {
2190         public static class TimestampHolder {
2191             public long mDeliveryTime;
2192             public long mTimestamp;
2193             TimestampHolder(long deliveryTime, long timestamp) {
2194                 mDeliveryTime = deliveryTime;
2195                 mTimestamp = timestamp;
2196             }
2197         }
2198 
2199         private final boolean mUseRealtime;
2200 
2201         private final LinkedBlockingQueue<TimestampHolder> mTimestampQueue =
2202                 new LinkedBlockingQueue<TimestampHolder>();
2203 
2204         SimpleTimestampListener(boolean timestampIsRealtime) {
2205             mUseRealtime = timestampIsRealtime;
2206         }
2207 
2208         @Override
2209         public void onImageAvailable(ImageReader reader) {
2210             try {
2211                 Image image = null;
2212                 image = reader.acquireNextImage();
2213                 if (image != null) {
2214                     long timestamp = image.getTimestamp();
2215                     long currentTimeMillis = mUseRealtime
2216                             ? SystemClock.elapsedRealtime() : SystemClock.uptimeMillis();
2217                     long currentTimeNs = currentTimeMillis * 1000000;
2218                     mTimestampQueue.put(new TimestampHolder(currentTimeNs, timestamp));
2219                     image.close();
2220                 }
2221             } catch (InterruptedException e) {
2222                 throw new UnsupportedOperationException(
2223                         "Can't handle InterruptedException in onImageAvailable");
2224             }
2225         }
2226 
2227         /**
2228          * Get the number of timestamps
2229          */
2230         public int getTimestampCount() {
2231             return mTimestampQueue.size();
2232         }
2233 
2234         /**
2235          * Get the timestamps for next image received.
2236          */
2237         public TimestampHolder getNextTimestampHolder() {
2238             TimestampHolder holder = mTimestampQueue.poll();
2239             return holder;
2240         }
2241     }
2242 
2243     private static class SimpleTimingResultListener
2244             extends CameraCaptureSession.CaptureCallback {
2245         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mPartialResultQueue =
2246                 new LinkedBlockingQueue<Pair<CaptureResult, Long> >();
2247         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mResultQueue =
2248                 new LinkedBlockingQueue<Pair<CaptureResult, Long> > ();
2249 
2250         @Override
2251         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
2252                 TotalCaptureResult result) {
2253             try {
2254                 Long time = SystemClock.elapsedRealtime();
2255                 mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
2256             } catch (InterruptedException e) {
2257                 throw new UnsupportedOperationException(
2258                         "Can't handle InterruptedException in onCaptureCompleted");
2259             }
2260         }
2261 
2262         @Override
2263         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
2264                 CaptureResult partialResult) {
2265             try {
2266                 // check if AE and AF state exists
2267                 Long time = -1L;
2268                 if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
2269                         partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
2270                     time = SystemClock.elapsedRealtime();
2271                 }
2272                 mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
2273             } catch (InterruptedException e) {
2274                 throw new UnsupportedOperationException(
2275                         "Can't handle InterruptedException in onCaptureProgressed");
2276             }
2277         }
2278 
2279         public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
2280             try {
2281                 Pair<CaptureResult, Long> result =
2282                         mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
2283                 return result;
2284             } catch (InterruptedException e) {
2285                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
2286             }
2287         }
2288 
2289         public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
2290             try {
2291                 Pair<CaptureResult, Long> result =
2292                         mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
2293                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
2294                 return result;
2295             } catch (InterruptedException e) {
2296                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
2297             }
2298         }
2299 
2300         public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(CaptureRequest myRequest,
2301                 int numResultsWait) {
2302             if (numResultsWait < 0) {
2303                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
2304             }
2305 
2306             Pair<CaptureResult, Long> result;
2307             int i = 0;
2308             do {
2309                 result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
2310                 // The result may be null if no partials are produced on this particular path, so
2311                 // stop trying
2312                 if (result == null) break;
2313                 if (result.first.getRequest().equals(myRequest)) {
2314                     return result;
2315                 }
2316             } while (i++ < numResultsWait);
2317 
2318             // No partials produced - this may not be an error, since a given device may not
2319             // produce any partials on this testing path
2320             return null;
2321         }
2322 
2323         public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(CaptureRequest myRequest,
2324                 int numResultsWait) {
2325             if (numResultsWait < 0) {
2326                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
2327             }
2328 
2329             Pair<CaptureResult, Long> result;
2330             int i = 0;
2331             do {
2332                 result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
2333                 if (result.first.getRequest().equals(myRequest)) {
2334                     return result;
2335                 }
2336             } while (i++ < numResultsWait);
2337 
2338             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
2339                     + "waiting for " + numResultsWait + " results");
2340         }
2341 
2342     }
2343 }
2344