/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.REPORT_LOG_NAME;

import static com.android.ex.camera2.blocking.BlockingSessionCallback.SESSION_CLOSED;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import android.app.Instrumentation;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestRule;
import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.ConditionVariable;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Size;
import android.view.Surface;

import androidx.test.InstrumentationRegistry;

import com.android.compatibility.common.util.DeviceReportLog;
import com.android.compatibility.common.util.ResultType;
import com.android.compatibility.common.util.ResultUnit;
import com.android.compatibility.common.util.Stat;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;

import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
 * shutter lag, etc. The KPI data will be reported in CTS results.
 */
@RunWith(JUnit4.class)
public class PerformanceTest {
    private static final String TAG = "PerformanceTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int NUM_TEST_LOOPS = 10;
    private static final int NUM_MAX_IMAGES = 4;
    private static final int NUM_RESULTS_WAIT = 30;
    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
    private final int MAX_REPROCESS_IMAGES = 6;
    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
    // The ZSL queue depth should be larger than the maximum number of simultaneous reprocessing
    // capture requests, to maintain a reasonable number of candidate images for the worst case.
    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
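    // With MAX_REPROCESS_IMAGES = 6, MAX_ZSL_IMAGES works out to a queue depth of 9 buffers.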
    private final double REPROCESS_STALL_MARGIN = 0.1;
    private static final int WAIT_FOR_RESULT_TIMEOUT_MS = 3000;
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
    private static final long FRAME_DURATION_NS_30FPS = 33333333L;
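    // 33333333 ns is the frame interval of a 30fps stream (1e9 / 30 ns), i.e. ~33.3 ms per frame.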

    private DeviceReportLog mReportLog;

    // Used for reading camera output buffers.
    private ImageReader mCameraZslReader;
    private SimpleImageReaderListener mCameraZslImageListener;
    // Used for reprocessing (jpeg) output.
    private ImageReader mJpegReader;
    private SimpleImageReaderListener mJpegListener;
    // Used for reprocessing input.
    private ImageWriter mWriter;
    private SimpleCaptureCallback mZslResultListener;

    private Size mPreviewSize;
    private Surface mPreviewSurface;
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mImageReaderFormat;

    private static final Instrumentation mInstrumentation =
            InstrumentationRegistry.getInstrumentation();
    private static final Context mContext = InstrumentationRegistry.getTargetContext();

    @Rule
    public final Camera2AndroidTestRule mTestRule = new Camera2AndroidTestRule(mContext);

    /**
     * Test camera launch KPI: the time duration between opening a camera device
     * and the first preview frame becoming available.
     * <p>
     * It includes camera open time, session creation time, and the processing latency of the
     * first preview request, etc. For the SurfaceView based preview use case, there is no way
     * for the client to know the exact preview frame arrival time. To approximate this time,
     * a companion YUV_420_888 stream is created. The first YUV_420_888 Image coming out of the
     * ImageReader is treated as the first preview frame's arrival time.</p>
     * <p>
     * For depth-only devices, timing is done with the DEPTH16 format instead.
     * </p>
     */
    @Test
    public void testCameraLaunch() throws Exception {
        double[] avgCameraLaunchTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = "test_camera_launch";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
            double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
            double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
            double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
            try {
                CameraCharacteristics ch =
                        mTestRule.getCameraManager().getCameraCharacteristics(id);
                mTestRule.setStaticInfo(new StaticMetadata(ch));
                boolean isColorOutputSupported = mTestRule.getStaticInfo().isColorOutputSupported();
                if (isColorOutputSupported) {
                    initializeImageReader(id, ImageFormat.YUV_420_888);
                } else {
                    assertTrue("Depth output must be supported if regular output isn't!",
                            mTestRule.getStaticInfo().isDepthOutputSupported());
                    initializeImageReader(id, ImageFormat.DEPTH16);
                }

                SimpleImageListener imageListener = null;
                long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {
                    try {
                        // Need to create a new listener every iteration to be able to wait
                        // for the first image to come out.
                        imageListener = new SimpleImageListener();
                        mTestRule.getReader().setOnImageAvailableListener(
                                imageListener, mTestRule.getHandler());
                        startTimeMs = SystemClock.elapsedRealtime();

                        // Blocking camera open
                        simpleOpenCamera(id);
                        openTimeMs = SystemClock.elapsedRealtime();
                        cameraOpenTimes[i] = openTimeMs - startTimeMs;

                        // Blocking configure outputs.
                        CaptureRequest previewRequest =
                                configureReaderAndPreviewOutputs(id, isColorOutputSupported);
                        configureTimeMs = SystemClock.elapsedRealtime();
                        configureStreamTimes[i] = configureTimeMs - openTimeMs;

                        // Blocking start preview (start preview to first image arrives)
                        SimpleCaptureCallback resultListener =
                                new SimpleCaptureCallback();
                        blockingStartPreview(id, resultListener, previewRequest, imageListener);
                        previewStartedTimeMs = SystemClock.elapsedRealtime();
                        startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
                        cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;
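                        // The launch time covers the whole open -> configure -> first-frame
                        // sequence, i.e. the sum of the three component times above.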

                        // Let preview run for a couple of frames
                        CameraTestUtils.waitForNumResults(resultListener, NUM_RESULTS_WAIT,
                                WAIT_FOR_RESULT_TIMEOUT_MS);

                        // Blocking stop preview
                        startTimeMs = SystemClock.elapsedRealtime();
                        blockingStopRepeating();
                        stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                    finally {
                        // Blocking camera close
                        startTimeMs = SystemClock.elapsedRealtime();
                        mTestRule.closeDevice(id);
                        cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                }

                avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
                // Finish the data collection, report the KPIs.
                // ReportLog keys have to be in lowercase underscored format.
                mReportLog.addValues("camera_open_time", cameraOpenTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);
                mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            finally {
                mTestRule.closeDefaultImageReader();
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);

            if (VERBOSE) {
                Log.v(TAG, "Camera " + id + " device open times(ms): "
                        + Arrays.toString(cameraOpenTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraOpenTimes)
                        + ". Min(ms): " + Stat.getMin(cameraOpenTimes)
                        + ". Max(ms): " + Stat.getMax(cameraOpenTimes));
                Log.v(TAG, "Camera " + id + " configure stream times(ms): "
                        + Arrays.toString(configureStreamTimes)
                        + ". Average(ms): " + Stat.getAverage(configureStreamTimes)
                        + ". Min(ms): " + Stat.getMin(configureStreamTimes)
                        + ". Max(ms): " + Stat.getMax(configureStreamTimes));
                Log.v(TAG, "Camera " + id + " start preview times(ms): "
                        + Arrays.toString(startPreviewTimes)
                        + ". Average(ms): " + Stat.getAverage(startPreviewTimes)
                        + ". Min(ms): " + Stat.getMin(startPreviewTimes)
                        + ". Max(ms): " + Stat.getMax(startPreviewTimes));
                Log.v(TAG, "Camera " + id + " stop preview times(ms): "
                        + Arrays.toString(stopPreviewTimes)
                        + ". Average(ms): " + Stat.getAverage(stopPreviewTimes)
                        + ". Min(ms): " + Stat.getMin(stopPreviewTimes)
                        + ". Max(ms): " + Stat.getMax(stopPreviewTimes));
                Log.v(TAG, "Camera " + id + " device close times(ms): "
                        + Arrays.toString(cameraCloseTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraCloseTimes)
                        + ". Min(ms): " + Stat.getMin(cameraCloseTimes)
                        + ". Max(ms): " + Stat.getMax(cameraCloseTimes));
                Log.v(TAG, "Camera " + id + " camera launch times(ms): "
                        + Arrays.toString(cameraLaunchTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraLaunchTimes)
                        + ". Min(ms): " + Stat.getMin(cameraLaunchTimes)
                        + ". Max(ms): " + Stat.getMax(cameraLaunchTimes));
            }
        }
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = "test_camera_launch_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
                    Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
        }
    }

    /**
     * Test camera capture KPI for YUV_420_888, PRIVATE, JPEG, RAW and RAW+JPEG
     * formats: the time duration between sending out a single image capture request
     * and receiving image data and capture result.
     * <p>
     * It enumerates the following metrics: capture latency, computed by
     * measuring the time between sending out the capture request and getting
     * the image data; partial result latency, computed by measuring the time
     * between sending out the capture request and getting the partial result;
     * capture result latency, computed by measuring the time between sending
     * out the capture request and getting the full capture result.
     * </p>
     */
    @Test
    public void testSingleCapture() throws Exception {
        int[] JPEG_FORMAT = {ImageFormat.JPEG};
        testSingleCaptureForFormat(JPEG_FORMAT, "jpeg", /*addPreviewDelay*/ true);
        if (!mTestRule.isPerfMeasure()) {
            int[] YUV_FORMAT = {ImageFormat.YUV_420_888};
            testSingleCaptureForFormat(YUV_FORMAT, null, /*addPreviewDelay*/ false);
            int[] PRIVATE_FORMAT = {ImageFormat.PRIVATE};
            testSingleCaptureForFormat(PRIVATE_FORMAT, "private", /*addPreviewDelay*/ true);
            int[] RAW_FORMAT = {ImageFormat.RAW_SENSOR};
            testSingleCaptureForFormat(RAW_FORMAT, "raw", /*addPreviewDelay*/ true);
            int[] RAW_JPEG_FORMATS = {ImageFormat.RAW_SENSOR, ImageFormat.JPEG};
            testSingleCaptureForFormat(RAW_JPEG_FORMATS, "raw_jpeg", /*addPreviewDelay*/ true);
        }
    }

    private String appendFormatDescription(String message, String formatDescription) {
        if (message == null) {
            return null;
        }

        String ret = message;
        if (formatDescription != null) {
            ret = String.format(ret + "_%s", formatDescription);
        }

        return ret;
    }

    private void testSingleCaptureForFormat(int[] formats, String formatDescription,
            boolean addPreviewDelay) throws Exception {
        double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
        double[] avgCaptureTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = appendFormatDescription("test_single_capture", formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] captureTimes = new double[NUM_TEST_LOOPS];
            double[] getPartialTimes = new double[NUM_TEST_LOOPS];
            double[] getResultTimes = new double[NUM_TEST_LOOPS];
            ImageReader[] readers = null;
            try {
                if (!mTestRule.getAllStaticInfo().get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                StreamConfigurationMap configMap = mTestRule.getAllStaticInfo().get(
                        id).getCharacteristics().get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                boolean formatsSupported = true;
                for (int format : formats) {
                    if (!configMap.isOutputSupportedFor(format)) {
                        Log.i(TAG, "Camera " + id + " does not support output format: " + format +
                                ", skipping");
                        formatsSupported = false;
                        break;
                    }
                }
                if (!formatsSupported) {
                    continue;
                }

                mTestRule.openDevice(id);

                boolean partialsExpected = mTestRule.getStaticInfo().getPartialResultCount() > 1;
                long startTimeMs;
                boolean isPartialTimingValid = partialsExpected;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageListener[] imageListeners = new SimpleImageListener[formats.length];
                    Size[] imageSizes = new Size[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        Size sizeBound = mTestRule.isPerfClassTest() ? new Size(1920, 1080) : null;
                        imageSizes[j] = CameraTestUtils.getSortedSizesForFormat(
                                id,
                                mTestRule.getCameraManager(),
                                formats[j],
                                sizeBound).get(0);
                        imageListeners[j] = new SimpleImageListener();
                    }

                    readers = prepareStillCaptureAndStartPreview(id, previewBuilder, captureBuilder,
                            mTestRule.getOrderedPreviewSizes().get(0), imageSizes, formats,
                            previewResultListener, NUM_MAX_IMAGES, imageListeners,
                            false /*isHeic*/);

                    if (addPreviewDelay) {
                        Thread.sleep(500);
                    }

                    // Capture an image and get image data
                    startTimeMs = SystemClock.elapsedRealtime();
                    CaptureRequest request = captureBuilder.build();
                    mTestRule.getCameraSession().capture(
                            request, captureResultListener, mTestRule.getHandler());

                    Pair<CaptureResult, Long> partialResultNTime = null;
                    if (partialsExpected) {
                        partialResultNTime = captureResultListener.getPartialResultNTimeForRequest(
                                request, NUM_RESULTS_WAIT);
                        // Even if maxPartials > 1, some devices may not send partial results.
                        if (partialResultNTime == null) {
                            partialsExpected = false;
                            isPartialTimingValid = false;
                        }
                    }
                    Pair<CaptureResult, Long> captureResultNTime =
                            captureResultListener.getCaptureResultNTimeForRequest(
                                    request, NUM_RESULTS_WAIT);

                    double[] imageTimes = new double[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        imageListeners[j].waitForImageAvailable(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                        imageTimes[j] = imageListeners[j].getTimeReceivedImage();
                    }

                    captureTimes[i] = Stat.getAverage(imageTimes) - startTimeMs;
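                    // Capture latency is measured against the average image arrival time across
                    // all requested formats for this request.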
                    if (partialsExpected) {
                        getPartialTimes[i] = partialResultNTime.second - startTimeMs;
                        if (getPartialTimes[i] < 0) {
                            isPartialTimingValid = false;
                        }
                    }
                    getResultTimes[i] = captureResultNTime.second - startTimeMs;

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    blockingStopRepeating();

                    CameraTestUtils.closeImageReaders(readers);
                    readers = null;
                }
                String message = appendFormatDescription("camera_capture_latency",
                        formatDescription);
                mReportLog.addValues(message, captureTimes, ResultType.LOWER_BETTER, ResultUnit.MS);
                // If any of the partial results do not contain AE and AF state, then no report
                if (isPartialTimingValid) {
                    message = appendFormatDescription("camera_partial_result_latency",
                            formatDescription);
                    mReportLog.addValues(message, getPartialTimes, ResultType.LOWER_BETTER,
                            ResultUnit.MS);
                }
                message = appendFormatDescription("camera_capture_result_latency",
                        formatDescription);
                mReportLog.addValues(message, getResultTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
                avgCaptureTimes[counter] = Stat.getAverage(captureTimes);
            }
            finally {
                CameraTestUtils.closeImageReaders(readers);
                readers = null;
                mTestRule.closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = appendFormatDescription("test_single_capture_average",
                    formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            // In performance measurement mode, capture the buffer latency rather than result
            // latency.
            if (mTestRule.isPerfMeasure()) {
                String message = appendFormatDescription(
                        "camera_capture_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgCaptureTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            } else {
                String message = appendFormatDescription(
                        "camera_capture_result_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgResultTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            mReportLog.submit(mInstrumentation);
        }
    }

    /**
     * Test multiple capture KPI for YUV_420_888 format: the average time duration
     * between sending out image capture requests and receiving capture results.
     * <p>
     * It measures capture latency, which is the time between sending out the capture
     * request and getting the full capture result, and the frame duration, which is the timestamp
     * gap between results.
     * </p>
     */
    @Test
    public void testMultipleCapture() throws Exception {
        double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
        double[] avgDurationMs = new double[mTestRule.getCameraIdsUnderTest().length];

        // A simple CaptureSession StateCallback to handle onCaptureQueueEmpty
        class MultipleCaptureStateCallback extends CameraCaptureSession.StateCallback {
            private ConditionVariable captureQueueEmptyCond = new ConditionVariable();
            private int captureQueueEmptied = 0;

            @Override
            public void onConfigured(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onCaptureQueueEmpty(CameraCaptureSession session) {
                captureQueueEmptied++;
                if (VERBOSE) {
                    Log.v(TAG, "onCaptureQueueEmpty received. captureQueueEmptied = "
                            + captureQueueEmptied);
                }

                captureQueueEmptyCond.open();
            }

            /* Wait for onCaptureQueueEmpty, return immediately if an onCaptureQueueEmpty was
             * already received, otherwise, wait for one to arrive. */
            public void waitForCaptureQueueEmpty(long timeout) {
                if (captureQueueEmptied > 0) {
                    captureQueueEmptied--;
                    return;
                }

                if (captureQueueEmptyCond.block(timeout)) {
                    captureQueueEmptyCond.close();
                    captureQueueEmptied = 0;
                } else {
                    throw new TimeoutRuntimeException("Unable to receive onCaptureQueueEmpty after "
                            + timeout + "ms");
                }
            }
        }

        final MultipleCaptureStateCallback sessionListener = new MultipleCaptureStateCallback();

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = "test_multiple_capture";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            long[] startTimes = new long[NUM_MAX_IMAGES];
            double[] getResultTimes = new double[NUM_MAX_IMAGES];
            double[] frameDurationMs = new double[NUM_MAX_IMAGES - 1];
            try {
                StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
                if (!staticMetadata.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                boolean useSessionKeys = isFpsRangeASessionKey(staticMetadata.getCharacteristics());
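                // If CONTROL_AE_TARGET_FPS_RANGE is a session key on this device, the FPS range
                // should be passed as a session parameter at session creation so it can be taken
                // into account when the streams are configured.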

                mTestRule.openDevice(id);
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageReaderListener imageListener =
                            new SimpleImageReaderListener(/*asyncMode*/true, NUM_MAX_IMAGES);

                    Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
                            id, mTestRule.getCameraManager(),
                            ImageFormat.YUV_420_888, /*bound*/null).get(0);
                    // Find minimum frame duration for YUV_420_888
                    StreamConfigurationMap config =
                            mTestRule.getStaticInfo().getCharacteristics().get(
                            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

                    final long minStillFrameDuration =
                            config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, maxYuvSize);
                    if (minStillFrameDuration > 0) {
                        Range<Integer> targetRange =
                                CameraTestUtils.getSuitableFpsRangeForDuration(id,
                                        minStillFrameDuration, mTestRule.getStaticInfo());
                        previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
                        captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
                    }

                    prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
                            mTestRule.getOrderedPreviewSizes().get(0), maxYuvSize,
                            ImageFormat.YUV_420_888, previewResultListener,
                            sessionListener, NUM_MAX_IMAGES, imageListener,
                            useSessionKeys);

                    // Converge AE
                    CameraTestUtils.waitForAeStable(previewResultListener,
                            NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY, mTestRule.getStaticInfo(),
                            WAIT_FOR_RESULT_TIMEOUT_MS, NUM_RESULTS_WAIT_TIMEOUT);

                    if (mTestRule.getStaticInfo().isAeLockSupported()) {
                        // Lock AE if possible to improve stability
                        previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
                        mTestRule.getCameraSession().setRepeatingRequest(previewBuilder.build(),
                                previewResultListener, mTestRule.getHandler());
                        if (mTestRule.getStaticInfo().isHardwareLevelAtLeastLimited()) {
                            // Legacy mode doesn't output AE state
                            CameraTestUtils.waitForResultValue(previewResultListener,
                                    CaptureResult.CONTROL_AE_STATE,
                                    CaptureResult.CONTROL_AE_STATE_LOCKED,
                                    NUM_RESULTS_WAIT_TIMEOUT, WAIT_FOR_RESULT_TIMEOUT_MS);
                        }
                    }

                    // Capture NUM_MAX_IMAGES images based on onCaptureQueueEmpty callback
                    for (int j = 0; j < NUM_MAX_IMAGES; j++) {

                        // Capture an image and get image data
                        startTimes[j] = SystemClock.elapsedRealtime();
                        CaptureRequest request = captureBuilder.build();
                        mTestRule.getCameraSession().capture(
                                request, captureResultListener, mTestRule.getHandler());

                        // Wait for capture queue empty for the current request
                        sessionListener.waitForCaptureQueueEmpty(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                    }

                    // Acquire the capture result time and frame duration
                    long prevTimestamp = -1;
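                    // Each test loop adds its measurement divided by NUM_TEST_LOOPS, so after all
                    // loops getResultTimes and frameDurationMs hold per-capture averages.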
                    for (int j = 0; j < NUM_MAX_IMAGES; j++) {
                        Pair<CaptureResult, Long> captureResultNTime =
                                captureResultListener.getCaptureResultNTime(
                                        CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);

                        getResultTimes[j] +=
                                (double)(captureResultNTime.second - startTimes[j])/NUM_TEST_LOOPS;

                        // Collect inter-frame timestamp
                        long timestamp = captureResultNTime.first.get(
                                CaptureResult.SENSOR_TIMESTAMP);
                        if (prevTimestamp != -1) {
                            frameDurationMs[j-1] +=
                                    (double)(timestamp - prevTimestamp)/(
                                            NUM_TEST_LOOPS * 1000000.0);
                        }
                        prevTimestamp = timestamp;
                    }

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    stopRepeating();
                }

                for (int i = 0; i < getResultTimes.length; i++) {
                    Log.v(TAG, "Camera " + id + " result time[" + i + "] is " +
                            getResultTimes[i] + " ms");
                }
                for (int i = 0; i < NUM_MAX_IMAGES-1; i++) {
                    Log.v(TAG, "Camera " + id + " frame duration time[" + i + "] is " +
                            frameDurationMs[i] + " ms");
                }

                mReportLog.addValues("camera_multiple_capture_result_latency", getResultTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_multiple_capture_frame_duration", frameDurationMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
                avgDurationMs[counter] = Stat.getAverage(frameDurationMs);
            }
            finally {
                mTestRule.closeDefaultImageReader();
                mTestRule.closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = "test_multiple_capture_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_multiple_capture_result_average_latency_for_all_cameras",
                    Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_multiple_capture_frame_duration_average_for_all_cameras",
                    Stat.getAverage(avgDurationMs), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
        }
    }


    /**
     * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the time
     * a reprocess request is issued to the time the reprocessed image is returned.
     */
    @Test
    public void testReprocessingLatency() throws Exception {
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    mTestRule.openDevice(id);
                    String streamName = "test_reprocessing_latency";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
                            /*requireHighQuality*/false);
                } finally {
                    closeReaderWriters();
                    mTestRule.closeDevice(id);
                    closePreviewSurface();
                    mReportLog.submit(mInstrumentation);
                }
            }
        }
    }

    /**
     * Test reprocessing throughput with default NR and edge options,
     * i.e., how many frames can be reprocessed during a given amount of time.
     */
    @Test
    public void testReprocessingThroughput() throws Exception {
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    mTestRule.openDevice(id);
                    String streamName = "test_reprocessing_throughput";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
                            /*requireHighQuality*/false);
                } finally {
                    closeReaderWriters();
                    mTestRule.closeDevice(id);
                    closePreviewSurface();
                    mReportLog.submit(mInstrumentation);
                }
            }
        }
    }

    /**
     * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e., from the
     * time a reprocess request is issued to the time the reprocessed image is returned.
     */
    @Test
    public void testHighQualityReprocessingLatency() throws Exception {
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    mTestRule.openDevice(id);
                    String streamName = "test_high_quality_reprocessing_latency";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
                            /*requireHighQuality*/true);
                } finally {
                    closeReaderWriters();
                    mTestRule.closeDevice(id);
                    closePreviewSurface();
                    mReportLog.submit(mInstrumentation);
                }
            }
        }
    }

    /**
     * Test reprocessing throughput with High Quality NR and edge options, i.e., how many frames
     * can be reprocessed during a given amount of time.
     */
    @Test
    public void testHighQualityReprocessingThroughput() throws Exception {
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    mTestRule.openDevice(id);
                    String streamName = "test_high_quality_reprocessing_throughput";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
                            /*requireHighQuality*/true);
                } finally {
                    closeReaderWriters();
                    mTestRule.closeDevice(id);
                    closePreviewSurface();
                    mReportLog.submit(mInstrumentation);
                }
            }
        }
    }

    /**
     * Test preview stall (frame drops) caused by reprocessing.
     */
    @Test
    public void testReprocessingCaptureStall() throws Exception {
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            for (int format : REPROCESS_FORMATS) {
                if (!isReprocessSupported(id, format)) {
                    continue;
                }

                try {
                    mTestRule.openDevice(id);
                    String streamName = "test_reprocessing_capture_stall";
                    mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
                    mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
                    mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
                    reprocessingCaptureStallTestByCamera(format);
                } finally {
                    closeReaderWriters();
                    mTestRule.closeDevice(id);
                    closePreviewSurface();
                    mReportLog.submit(mInstrumentation);
                }
            }
        }
    }

    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocessing requests
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }

        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mTestRule.getCameraSession().capture(
                    reprocessRequest, reprocessResultListener, mTestRule.getHandler());
            // Wait for the reprocess output jpeg and its result to come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                            + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need to look over a longer window, as the stutter could happen after the
            // reprocessing output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Timestamps should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Get the number of candidate results, calculate the average frame duration
            // and max timestamp gap.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        blockingStopRepeating();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL, ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x average frame
        // duration * (1 + error margin).
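        // For example, if maxCaptureStallFrames is 2 and the average frame duration is 33 ms,
        // the allowed bound is 33 * (2 + 1) * 1.1 ~= 109 ms.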
        int maxCaptureStallFrames = mTestRule.getStaticInfo().getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than " + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
969 
reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode, boolean requireHighQuality)970     private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode,
971             boolean requireHighQuality)
972             throws Exception {
973         // Prepare the reprocessing capture
974         prepareReprocessCapture(reprocessInputFormat);
975 
976         // Start ZSL streaming
977         startZslStreaming();
978         waitForFrames(NUM_RESULTS_WAIT);
979 
980         CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
981         Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
982         double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
983         long startTimeMs;
984         for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
985             inputImages[i] =
986                     mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
987             TotalCaptureResult zslResult =
988                     mZslResultListener.getCaptureResult(
989                             WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
990             reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
991             if (requireHighQuality) {
992                 // Reprocessing should support high quality for NR and edge modes.
993                 reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
994                         CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
995                 reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
996                         CaptureRequest.EDGE_MODE_HIGH_QUALITY);
997             }
998             reprocessReqs[i].addTarget(mJpegReader.getSurface());
999         }
1000 
1001         if (asyncMode) {
1002             // async capture: issue all the reprocess requests as quick as possible, then
1003             // check the throughput of the output jpegs.
1004             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1005                 // Could be slow for YUV reprocessing, do it in advance.
1006                 mWriter.queueInputImage(inputImages[i]);
1007             }
1008 
1009             // Submit the requests
1010             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1011                 mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
1012             }
1013 
1014             // Get images
1015             startTimeMs = SystemClock.elapsedRealtime();
1016             Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
1017             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1018                 jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1019                 getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
1020                 startTimeMs = SystemClock.elapsedRealtime();
1021             }
1022             for (Image i : jpegImages) {
1023                 i.close();
1024             }
1025         } else {
1026             // sync capture: issue reprocess request one by one, only submit next one when
1027             // the previous capture image is returned. This is to test the back to back capture
1028             // performance.
1029             Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
1030             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1031                 startTimeMs = SystemClock.elapsedRealtime();
1032                 mWriter.queueInputImage(inputImages[i]);
1033                 mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
1034                 jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1035                 getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
1036             }
1037             for (Image i : jpegImages) {
1038                 i.close();
1039             }
1040         }
1041 
1042         blockingStopRepeating();
1043 
1044         String reprocessType = "YUV reprocessing";
1045         if (reprocessInputFormat == ImageFormat.PRIVATE) {
1046             reprocessType = "opaque reprocessing";
1047         }
1048 
1049         // Report the performance data
1050         String captureMsg;
1051         if (asyncMode) {
1052             captureMsg = "capture latency";
1053             if (requireHighQuality) {
1054                 captureMsg += " for High Quality noise reduction and edge modes";
1055             }
1056             mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
1057                     ResultUnit.NONE);
1058             mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
1059                     ResultUnit.NONE);
1060             mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
1061                     ResultUnit.MS);
1062             mReportLog.setSummary("camera_reprocessing_average_latency",
1063                     Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
1064         } else {
1065             captureMsg = "shot to shot latency";
1066             if (requireHighQuality) {
1067                 captureMsg += " for High Quality noise reduction and edge modes";
1068             }
1069             mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
1070                     ResultUnit.NONE);
1071             mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
1072                     ResultUnit.NONE);
1073             mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
1074                     ResultUnit.MS);
1075             mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
1076                     Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
1077         }
1078     }
1079 
1080     /**
1081      * Start preview and ZSL streaming
1082      */
1083     private void startZslStreaming() throws Exception {
1084         CaptureRequest.Builder zslBuilder =
1085                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
1086         zslBuilder.addTarget(mPreviewSurface);
1087         zslBuilder.addTarget(mCameraZslReader.getSurface());
1088         mTestRule.getCameraSession().setRepeatingRequest(
1089                 zslBuilder.build(), mZslResultListener, mTestRule.getHandler());
1090     }
1091 
1092     /**
1093      * Wait for a certain number of frames; the images and results are drained from the
1094      * listeners so that the next reprocess capture can get matched results and images.
1095      *
1096      * @param numFrameWait The number of frames to wait for before returning; 0 means that
1097      *      this call returns immediately after streaming starts.
1098      */
1099     private void waitForFrames(int numFrameWait) throws Exception {
1100         if (numFrameWait < 0) {
1101             throw new IllegalArgumentException("numFrameWait " + numFrameWait +
1102                     " should be non-negative");
1103         }
1104 
1105         for (int i = 0; i < numFrameWait; i++) {
1106             mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
1107         }
1108     }
1109 
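    /**
     * Drain the pending images and close the ZSL/JPEG image readers and the reprocess
     * input ImageWriter.
     */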
1110     private void closeReaderWriters() {
1111         mCameraZslImageListener.drain();
1112         CameraTestUtils.closeImageReader(mCameraZslReader);
1113         mCameraZslReader = null;
1114         mJpegListener.drain();
1115         CameraTestUtils.closeImageReader(mJpegReader);
1116         mJpegReader = null;
1117         CameraTestUtils.closeImageWriter(mWriter);
1118         mWriter = null;
1119     }
1120 
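    /**
     * Set up the preview surface, the ZSL and JPEG ImageReaders, a reprocessable capture
     * session, and the ImageWriter that feeds reprocess input images.
     */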
1121     private void prepareReprocessCapture(int inputFormat)
1122             throws CameraAccessException {
1123         // 1. Find the right preview and capture sizes.
1124         Size maxPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
1125         Size[] supportedInputSizes =
1126                 mTestRule.getStaticInfo().getAvailableSizesForFormatChecked(inputFormat,
1127                         StaticMetadata.StreamDirection.Input);
1128         Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
1129         Size maxJpegSize = mTestRule.getOrderedStillSizes().get(0);
1130         updatePreviewSurface(maxPreviewSize);
1131         mZslResultListener = new SimpleCaptureCallback();
1132 
1133         // 2. Create camera output ImageReaders.
1134         // YUV/opaque output; the camera should support output with the input size/format
1135         mCameraZslImageListener = new SimpleImageReaderListener(
1136                 /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
1137         mCameraZslReader = CameraTestUtils.makeImageReader(
1138                 maxInputSize, inputFormat, MAX_ZSL_IMAGES,
1139                 mCameraZslImageListener, mTestRule.getHandler());
1140         // Jpeg reprocess output
1141         mJpegListener = new SimpleImageReaderListener();
1142         mJpegReader = CameraTestUtils.makeImageReader(
1143                 maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES,
1144                 mJpegListener, mTestRule.getHandler());
1145 
1146         // create camera reprocess session
1147         List<Surface> outSurfaces = new ArrayList<Surface>();
1148         outSurfaces.add(mPreviewSurface);
1149         outSurfaces.add(mCameraZslReader.getSurface());
1150         outSurfaces.add(mJpegReader.getSurface());
1151         InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
1152                 maxInputSize.getHeight(), inputFormat);
1153         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1154         mTestRule.setCameraSession(CameraTestUtils.configureReprocessableCameraSession(
1155                 mTestRule.getCamera(), inputConfig, outSurfaces,
1156                 mTestRule.getCameraSessionListener(), mTestRule.getHandler()));
1157 
1158         // 3. Create ImageWriter for input
1159         mWriter = CameraTestUtils.makeImageWriter(
1160                 mTestRule.getCameraSession().getInputSurface(), MAX_INPUT_IMAGES,
1161                 /*listener*/null, /*handler*/null);
1162     }
1163 
1164     /**
1165      * Stop repeating requests for the current camera and wait for it to go back to idle,
1166      * leaving the device idle.
1167      */
1168     private void blockingStopRepeating() throws Exception {
1169         stopRepeating();
1170         mTestRule.getCameraSessionListener().getStateWaiter().waitForState(
1171                 BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
1172     }
1173 
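    /**
     * Start the repeating preview request and block until the first preview image is available.
     */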
1174     private void blockingStartPreview(String id, CaptureCallback listener,
1175             CaptureRequest previewRequest, SimpleImageListener imageListener)
1176             throws Exception {
1177         mTestRule.getCameraSession().setRepeatingRequest(
1178                 previewRequest, listener, mTestRule.getHandler());
1179         imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1180     }
1181 
1182     /**
1183      * Setup still capture configuration and start preview.
1184      *
1185      * @param id The camera id under test
1186      * @param previewBuilder The capture request builder to be used for preview
1187      * @param stillBuilder The capture request builder to be used for still capture
1188      * @param previewSz Preview size
1189      * @param captureSizes Still capture sizes
1190      * @param formats The single capture image formats
1191      * @param resultListener Capture result listener
1192      * @param maxNumImages The max number of images the image reader can hold
1193      * @param imageListeners The single capture image listeners
1194      * @param isHeic Capture HEIC image if true, JPEG image if false
1195      */
1196     private ImageReader[] prepareStillCaptureAndStartPreview(String id,
1197             CaptureRequest.Builder previewBuilder, CaptureRequest.Builder stillBuilder,
1198             Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener,
1199             int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners,
1200             boolean isHeic)
1201             throws Exception {
1202 
1203         if ((captureSizes == null) || (formats == null) || (imageListeners == null) ||
1204                 (captureSizes.length != formats.length) ||
1205                 (formats.length != imageListeners.length)) {
1206             throw new IllegalArgumentException("Invalid capture sizes/formats or image listeners!");
1207         }
1208 
1209         if (VERBOSE) {
1210             Log.v(TAG, String.format("Prepare still capture and preview (%s)",
1211                     previewSz.toString()));
1212         }
1213 
1214         // Update preview size.
1215         updatePreviewSurface(previewSz);
1216 
1217         ImageReader[] readers = new ImageReader[captureSizes.length];
1218         List<Surface> outputSurfaces = new ArrayList<Surface>();
1219         outputSurfaces.add(mPreviewSurface);
1220         for (int i = 0; i < captureSizes.length; i++) {
1221             readers[i] = CameraTestUtils.makeImageReader(captureSizes[i], formats[i], maxNumImages,
1222                     imageListeners[i], mTestRule.getHandler());
1223             outputSurfaces.add(readers[i].getSurface());
1224         }
1225 
1226         // Configure the requests.
1227         previewBuilder.addTarget(mPreviewSurface);
1228         stillBuilder.addTarget(mPreviewSurface);
1229         for (int i = 0; i < readers.length; i++) {
1230             stillBuilder.addTarget(readers[i].getSurface());
1231         }
1232 
1233         // Update target fps based on the min frame duration of preview.
1234         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1235         StreamConfigurationMap config = ch.get(
1236                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1237         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS, config.getOutputMinFrameDuration(
1238                 SurfaceTexture.class, previewSz));
1239         Range<Integer> targetRange =
1240                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1241                 minFrameDuration, mTestRule.getStaticInfo());
1242         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1243         stillBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1244 
1245         CaptureRequest previewRequest = previewBuilder.build();
1246         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1247         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1248         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1249 
1250         // Start preview.
1251         mTestRule.getCameraSession().setRepeatingRequest(
1252                 previewRequest, resultListener, mTestRule.getHandler());
1253 
1254         return readers;
1255     }
1256 
1257     /**
1258      * Helper function to check if TARGET_FPS_RANGE is a session parameter
1259      */
1260     private boolean isFpsRangeASessionKey(CameraCharacteristics ch) {
1261         List<CaptureRequest.Key<?>> sessionKeys = ch.getAvailableSessionKeys();
1262         return sessionKeys != null &&
1263                 sessionKeys.contains(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
1264     }
1265 
1266     /**
1267      * Helper function to configure camera session using parameters provided.
1268      */
1269     private void configureAndSetCameraSession(List<Surface> surfaces,
1270             boolean useInitialRequest, CaptureRequest initialRequest)
1271             throws CameraAccessException {
1272         CameraCaptureSession cameraSession;
1273         if (useInitialRequest) {
1274             cameraSession = CameraTestUtils.configureCameraSessionWithParameters(
1275                 mTestRule.getCamera(), surfaces,
1276                 mTestRule.getCameraSessionListener(), mTestRule.getHandler(),
1277                 initialRequest);
1278         } else {
1279             cameraSession = CameraTestUtils.configureCameraSession(
1280                 mTestRule.getCamera(), surfaces,
1281                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1282         }
1283         mTestRule.setCameraSession(cameraSession);
1284     }
1285 
1286     /**
1287      * Setup single capture configuration and start preview.
1288      *
1289      * @param previewBuilder The capture request builder to be used for preview
1290      * @param stillBuilder The capture request builder to be used for still capture
1291      * @param previewSz Preview size
1292      * @param captureSz Still capture size
1293      * @param format The single capture image format
1294      * @param resultListener Capture result listener
1295      * @param sessionListener Session listener
1296      * @param maxNumImages The max number of images the image reader can hold
1297      * @param imageListener The single capture image listener
1298      * @param useSessionKeys Create capture session using session keys from previewRequest
1299      */
1300     private void prepareCaptureAndStartPreview(CaptureRequest.Builder previewBuilder,
1301             CaptureRequest.Builder stillBuilder, Size previewSz, Size captureSz, int format,
1302             CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener,
1303             int maxNumImages, ImageReader.OnImageAvailableListener imageListener,
1304             boolean useSessionKeys) throws Exception {
1305         if ((captureSz == null) || (imageListener == null)) {
1306             throw new IllegalArgumentException("Invalid capture size or image listener!");
1307         }
1308 
1309         if (VERBOSE) {
1310             Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
1311                     captureSz.toString(), previewSz.toString()));
1312         }
1313 
1314         // Update preview size.
1315         updatePreviewSurface(previewSz);
1316 
1317         // Create ImageReader.
1318         mTestRule.createDefaultImageReader(captureSz, format, maxNumImages, imageListener);
1319 
1320         // Configure output streams with preview and jpeg streams.
1321         List<Surface> outputSurfaces = new ArrayList<Surface>();
1322         outputSurfaces.add(mPreviewSurface);
1323         outputSurfaces.add(mTestRule.getReaderSurface());
1324         if (sessionListener == null) {
1325             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1326         } else {
1327             mTestRule.setCameraSessionListener(new BlockingSessionCallback(sessionListener));
1328         }
1329 
1330         // Configure the requests.
1331         previewBuilder.addTarget(mPreviewSurface);
1332         stillBuilder.addTarget(mPreviewSurface);
1333         stillBuilder.addTarget(mTestRule.getReaderSurface());
1334         CaptureRequest previewRequest = previewBuilder.build();
1335 
1336         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1337 
1338         // Start preview.
1339         mTestRule.getCameraSession().setRepeatingRequest(
1340                 previewRequest, resultListener, mTestRule.getHandler());
1341     }
1342 
1343     /**
1344      * Update the preview surface size.
1345      *
1346      * @param size The preview size to be updated.
1347      */
1348     private void updatePreviewSurface(Size size) {
1349         if ((mPreviewSurfaceTexture != null) || (mPreviewSurface != null)) {
1350             closePreviewSurface();
1351         }
1352 
1353         mPreviewSurfaceTexture = new SurfaceTexture(/*random int*/ 1);
1354         mPreviewSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
1355         mPreviewSurface = new Surface(mPreviewSurfaceTexture);
1356     }
1357 
1358     /**
1359      * Release preview surface and corresponding surface texture.
1360      */
1361     private void closePreviewSurface() {
1362         if (mPreviewSurface != null) {
1363             mPreviewSurface.release();
1364             mPreviewSurface = null;
1365         }
1366 
1367         if (mPreviewSurfaceTexture != null) {
1368             mPreviewSurfaceTexture.release();
1369             mPreviewSurfaceTexture = null;
1370         }
1371     }
1372 
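    /**
     * Check whether the camera advertises the YUV or PRIVATE reprocessing capability for the
     * given input format.
     */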
1373     private boolean isReprocessSupported(String cameraId, int format)
1374             throws CameraAccessException {
1375         if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
1376             throw new IllegalArgumentException(
1377                     "format " + format + " is not supported for reprocessing");
1378         }
1379 
1380         StaticMetadata info = new StaticMetadata(
1381                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId), CheckLevel.ASSERT,
1382                 /*collector*/ null);
1383         int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
1384         if (format == ImageFormat.PRIVATE) {
1385             cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
1386         }
1387         return info.isCapabilitySupported(cap);
1388     }
1389 
1390     /**
1391      * Stop the repeating requests of the current camera.
1392      * Does _not_ wait for the device to go idle.
1393      */
1394     private void stopRepeating() throws Exception {
1395         // Stop repeat, wait for captures to complete, and disconnect from surfaces
1396         if (mTestRule.getCameraSession() != null) {
1397             if (VERBOSE) Log.v(TAG, "Stopping preview");
1398             mTestRule.getCameraSession().stopRepeating();
1399         }
1400     }
1401 
1402     /**
1403      * Configure reader and preview outputs and wait until done.
1404      *
1405      * @return The preview capture request
1406      */
1407     private CaptureRequest configureReaderAndPreviewOutputs(
1408             String id, boolean isColorOutputSupported)
1409             throws Exception {
1410         if (mPreviewSurface == null || mTestRule.getReaderSurface() == null) {
1411             throw new IllegalStateException("preview and reader surfaces must be initialized first");
1412         }
1413 
1414         // Create previewBuilder
1415         CaptureRequest.Builder previewBuilder =
1416                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1417         if (isColorOutputSupported) {
1418             previewBuilder.addTarget(mPreviewSurface);
1419         }
1420         previewBuilder.addTarget(mTestRule.getReaderSurface());
1421 
1422 
1423         // Figure out constant target FPS range no larger than 30fps
1424         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1425         StreamConfigurationMap config =
1426                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1427         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
1428                 config.getOutputMinFrameDuration(mImageReaderFormat, mPreviewSize));
1429 
1430         List<Surface> outputSurfaces = new ArrayList<>();
1431         outputSurfaces.add(mTestRule.getReaderSurface());
1432         if (isColorOutputSupported) {
1433             outputSurfaces.add(mPreviewSurface);
1434             minFrameDuration = Math.max(minFrameDuration,
1435                     config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
1436         }
1437         Range<Integer> targetRange =
1438                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1439                         minFrameDuration, mTestRule.getStaticInfo());
1440         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1441 
1442         // Create capture session
1443         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1444         CaptureRequest previewRequest = previewBuilder.build();
1445         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1446         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1447 
1448         return previewRequest;
1449     }
1450 
1451     /**
1452      * Initialize the ImageReader instance and preview surface.
1453      * @param cameraId The camera to be opened.
1454      * @param format The format used to create ImageReader instance.
1455      */
1456     private void initializeImageReader(String cameraId, int format) throws Exception {
1457         mTestRule.setOrderedPreviewSizes(CameraTestUtils.getSortedSizesForFormat(
1458                 cameraId, mTestRule.getCameraManager(), format,
1459                 CameraTestUtils.getPreviewSizeBound(mTestRule.getWindowManager(),
1460                         CameraTestUtils.PREVIEW_SIZE_BOUND)));
1461         mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
1462         mImageReaderFormat = format;
1463         mTestRule.createDefaultImageReader(
1464                 mPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
1465         updatePreviewSurface(mPreviewSize);
1466     }
1467 
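    /**
     * Open the camera device and cache its static metadata for the test.
     */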
1468     private void simpleOpenCamera(String cameraId) throws Exception {
1469         mTestRule.setCamera(CameraTestUtils.openCamera(
1470                 mTestRule.getCameraManager(), cameraId,
1471                 mTestRule.getCameraListener(), mTestRule.getHandler()));
1472         mTestRule.getCollector().setCameraId(cameraId);
1473         mTestRule.setStaticInfo(new StaticMetadata(
1474                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId),
1475                 CheckLevel.ASSERT, /*collector*/null));
1476     }
1477 
1478     /**
1479      * Simple image listener that can be used to time the availability of the first image.
1480      *
1481      */
1482     private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
1483         private ConditionVariable imageAvailable = new ConditionVariable();
1484         private boolean imageReceived = false;
1485         private long mTimeReceivedImage = 0;
1486 
1487         @Override
1488         public void onImageAvailable(ImageReader reader) {
1489             Image image = null;
1490             if (!imageReceived) {
1491                 if (VERBOSE) {
1492                     Log.v(TAG, "First image arrives");
1493                 }
1494                 imageReceived = true;
1495                 mTimeReceivedImage = SystemClock.elapsedRealtime();
1496                 imageAvailable.open();
1497             }
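            // Always acquire and close the incoming image so the reader's buffer queue
            // does not fill up.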
1498             image = reader.acquireNextImage();
1499             if (image != null) {
1500                 image.close();
1501             }
1502         }
1503 
1504         /**
1505          * Wait for an image to become available; return immediately if the image was already
1506          * received, otherwise block until an image arrives or the timeout expires.
1507          */
1508         public void waitForImageAvailable(long timeout) {
1509             if (imageReceived) {
1510                 imageReceived = false;
1511                 return;
1512             }
1513 
1514             if (imageAvailable.block(timeout)) {
1515                 imageAvailable.close();
1516                 imageReceived = false;
1517             } else {
1518                 throw new TimeoutRuntimeException("Unable to get the first image after "
1519                         + timeout + "ms");
1520             }
1521         }
1522 
1523         public long getTimeReceivedImage() {
1524             return mTimeReceivedImage;
1525         }
1526     }
1527 
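    /**
     * Capture callback that queues each partial and final capture result together with the
     * timestamp at which it was received.
     */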
1528     private static class SimpleTimingResultListener
1529             extends CameraCaptureSession.CaptureCallback {
1530         private final LinkedBlockingQueue<Pair<CaptureResult, Long>> mPartialResultQueue =
1531                 new LinkedBlockingQueue<>();
1532         private final LinkedBlockingQueue<Pair<CaptureResult, Long>> mResultQueue =
1533                 new LinkedBlockingQueue<>();
1534 
1535         @Override
1536         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
1537                 TotalCaptureResult result) {
1538             try {
1539                 Long time = SystemClock.elapsedRealtime();
1540                 mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
1541             } catch (InterruptedException e) {
1542                 throw new UnsupportedOperationException(
1543                         "Can't handle InterruptedException in onCaptureCompleted");
1544             }
1545         }
1546 
1547         @Override
1548         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
1549                 CaptureResult partialResult) {
1550             try {
1551                 // check if AE and AF state exists
1552                 Long time = -1L;
1553                 if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
1554                         partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
1555                     time = SystemClock.elapsedRealtime();
1556                 }
1557                 mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
1558             } catch (InterruptedException e) {
1559                 throw new UnsupportedOperationException(
1560                         "Can't handle InterruptedException in onCaptureProgressed");
1561             }
1562         }
1563 
1564         public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
1565             try {
1566                 Pair<CaptureResult, Long> result =
1567                         mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
1568                 return result;
1569             } catch (InterruptedException e) {
1570                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1571             }
1572         }
1573 
1574         public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
1575             try {
1576                 Pair<CaptureResult, Long> result =
1577                         mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
1578                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
1579                 return result;
1580             } catch (InterruptedException e) {
1581                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1582             }
1583         }
1584 
1585         public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(CaptureRequest myRequest,
1586                 int numResultsWait) {
1587             if (numResultsWait < 0) {
1588                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1589             }
1590 
1591             Pair<CaptureResult, Long> result;
1592             int i = 0;
1593             do {
1594                 result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
1595                 // The result may be null if no partials are produced on this particular path, so
1596                 // stop trying
1597                 if (result == null) break;
1598                 if (result.first.getRequest().equals(myRequest)) {
1599                     return result;
1600                 }
1601             } while (i++ < numResultsWait);
1602 
1603             // No partials produced - this may not be an error, since a given device may not
1604             // produce any partials on this testing path
1605             return null;
1606         }
1607 
1608         public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(CaptureRequest myRequest,
1609                 int numResultsWait) {
1610             if (numResultsWait < 0) {
1611                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1612             }
1613 
1614             Pair<CaptureResult, Long> result;
1615             int i = 0;
1616             do {
1617                 result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
1618                 if (result.first.getRequest().equals(myRequest)) {
1619                     return result;
1620                 }
1621             } while (i++ < numResultsWait);
1622 
1623             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
1624                     + "waiting for " + numResultsWait + " results");
1625         }
1626 
1627     }
1628 }
1629