/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static org.mockito.Mockito.*;

import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ColorSpace;
import android.graphics.Gainmap;
import android.graphics.ImageFormat;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraDevice.CameraDeviceSetup;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.MultiResolutionImageReader;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.helpers.CameraErrorCollector;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.params.DynamicRangeProfiles;
import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
import android.hardware.camera2.params.MultiResolutionStreamInfo;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.cts.helpers.CameraUtils;
import android.location.Location;
import android.location.LocationManager;
import android.media.CamcorderProfile;
import android.media.ExifInterface;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Size;
import android.view.Surface;
import android.view.WindowManager;
import android.view.WindowMetrics;

import androidx.annotation.NonNull;

import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
import com.android.internal.camera.flags.Flags;

import junit.framework.Assert;

import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mockito;

import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * A package-private utility class that wraps up common utility functions for the camera2 CTS tests
 */
public class CameraTestUtils extends Assert {
    private static final String TAG = "CameraTestUtils";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    public static final Size SIZE_BOUND_720P = new Size(1280, 720);
    public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
    public static final Size SIZE_BOUND_2K = new Size(2048, 1088);
    public static final Size SIZE_BOUND_QHD = new Size(2560, 1440);
    public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
    // Only test preview sizes that are no larger than 1080p.
    public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
    // Default timeouts for reaching various states
    public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
    public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
    public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
    public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
    public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
    public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
    public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
    public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;

    public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
    public static final int SESSION_READY_TIMEOUT_MS = 5000;
    public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;

    public static final int MAX_READER_IMAGES = 5;

    public static final int INDEX_ALGORITHM_AE = 0;
    public static final int INDEX_ALGORITHM_AWB = 1;
    public static final int INDEX_ALGORITHM_AF = 2;
    public static final int NUM_ALGORITHMS = 3; // AE, AWB and AF

    // Compensate for the loss of "sensitivity" and "sensitivityBoost"
    public static final int MAX_ISO_MISMATCH = 3;

    public static final String OFFLINE_CAMERA_ID = "offline_camera_id";
    public static final String REPORT_LOG_NAME = "CtsCameraTestCases";

    private static final int EXIF_DATETIME_LENGTH = 19;
    private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
    private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
    private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
    private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
    private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;

    private static final float ZOOM_RATIO_THRESHOLD = 0.01f;

    // Set such that 1920x1080 and 1920x1088 are treated as the same aspect ratio.
    private static final float ASPECT_RATIO_MATCH_THRESHOLD = 0.014f;

    private static final int AVAILABILITY_TIMEOUT_MS = 10;

    private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);

    static {
        sTestLocation0.setTime(1199145600000L);
        sTestLocation0.setLatitude(37.736071);
        sTestLocation0.setLongitude(-122.441983);
        sTestLocation0.setAltitude(21.0);

        sTestLocation1.setTime(1199145601000L);
        sTestLocation1.setLatitude(0.736071);
        sTestLocation1.setLongitude(0.441983);
        sTestLocation1.setAltitude(1.0);

        sTestLocation2.setTime(1199145602000L);
        sTestLocation2.setLatitude(-89.736071);
        sTestLocation2.setLongitude(-179.441983);
        sTestLocation2.setAltitude(100000.0);
    }

    // Exif test data vectors.
    public static final ExifTestData[] EXIF_TEST_DATA = {
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation0,
                    /* orientation */90,
                    /* jpgQuality */(byte) 80,
                    /* thumbQuality */(byte) 75),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation1,
                    /* orientation */180,
                    /* jpgQuality */(byte) 90,
                    /* thumbQuality */(byte) 85),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation2,
                    /* orientation */270,
                    /* jpgQuality */(byte) 100,
                    /* thumbQuality */(byte) 80)
    };
211 
212     /**
213      * Create an {@link android.media.ImageReader} object and get the surface.
214      *
215      * @param size The size of this ImageReader to be created.
216      * @param format The format of this ImageReader to be created
217      * @param maxNumImages The max number of images that can be acquired simultaneously.
218      * @param listener The listener used by this ImageReader to notify callbacks.
219      * @param handler The handler to use for any listener callbacks.
220      */
makeImageReader(Size size, int format, int maxNumImages, ImageReader.OnImageAvailableListener listener, Handler handler)221     public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
222             ImageReader.OnImageAvailableListener listener, Handler handler) {
223         ImageReader reader;
224         reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
225                 maxNumImages);
226         reader.setOnImageAvailableListener(listener, handler);
227         if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
228         return reader;
229     }
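
    // Illustrative usage sketch (not part of the original utilities): create a YUV reader with
    // this helper and drain it through a SimpleImageReaderListener. The 'handler' and the chosen
    // size/format below are assumptions made for the example.
    //
    //     SimpleImageReaderListener readerListener = new SimpleImageReaderListener();
    //     ImageReader reader = CameraTestUtils.makeImageReader(
    //             new Size(1920, 1080), ImageFormat.YUV_420_888, MAX_READER_IMAGES,
    //             readerListener, handler);
    //     // ... submit capture requests targeting reader.getSurface() ...
    //     Image image = readerListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     image.close();
    //     CameraTestUtils.closeImageReader(reader);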

    /**
     * Create an ImageWriter and hook up the ImageListener.
     *
     * @param inputSurface The input surface of the ImageWriter.
     * @param maxImages The max number of Images that can be dequeued simultaneously.
     * @param listener The listener used by this ImageWriter to notify callbacks.
     * @param handler The handler to post listener callbacks.
     * @return The created ImageWriter object.
     */
    public static ImageWriter makeImageWriter(
            Surface inputSurface, int maxImages,
            ImageWriter.OnImageReleasedListener listener, Handler handler) {
        ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
        writer.setOnImageReleasedListener(listener, handler);
        return writer;
    }
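
    // Illustrative usage sketch (assumption: a reprocessable 'session', a 'handler' and a
    // 'reprocessInputImage' exist in the calling test). ImageWriter.OnImageReleasedListener is a
    // single-method interface, so a lambda can serve as the listener here.
    //
    //     ImageWriter writer = CameraTestUtils.makeImageWriter(
    //             session.getInputSurface(), /*maxImages*/ 2,
    //             releasedWriter -> Log.v(TAG, "Input image released"), handler);
    //     writer.queueInputImage(reprocessInputImage);
    //     CameraTestUtils.closeImageWriter(writer);
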
247 
248     /**
249      * Utility class to store the targets for mandatory stream combination test.
250      */
251     public static class StreamCombinationTargets {
252         public List<SurfaceTexture> mPrivTargets = new ArrayList<>();
253         public List<ImageReader> mJpegTargets = new ArrayList<>();
254         public List<ImageReader> mYuvTargets = new ArrayList<>();
255         public List<ImageReader> mY8Targets = new ArrayList<>();
256         public List<ImageReader> mRawTargets = new ArrayList<>();
257         public List<ImageReader> mHeicTargets = new ArrayList<>();
258         public List<ImageReader> mDepth16Targets = new ArrayList<>();
259         public List<ImageReader> mP010Targets = new ArrayList<>();
260 
261 
262         public List<MultiResolutionImageReader> mPrivMultiResTargets = new ArrayList<>();
263         public List<MultiResolutionImageReader> mJpegMultiResTargets = new ArrayList<>();
264         public List<MultiResolutionImageReader> mYuvMultiResTargets = new ArrayList<>();
265         public List<MultiResolutionImageReader> mRawMultiResTargets = new ArrayList<>();
266 
close()267         public void close() {
268             for (SurfaceTexture target : mPrivTargets) {
269                 target.release();
270             }
271             for (ImageReader target : mJpegTargets) {
272                 target.close();
273             }
274             for (ImageReader target : mYuvTargets) {
275                 target.close();
276             }
277             for (ImageReader target : mY8Targets) {
278                 target.close();
279             }
280             for (ImageReader target : mRawTargets) {
281                 target.close();
282             }
283             for (ImageReader target : mHeicTargets) {
284                 target.close();
285             }
286             for (ImageReader target : mDepth16Targets) {
287                 target.close();
288             }
289             for (ImageReader target : mP010Targets) {
290                 target.close();
291             }
292 
293             for (MultiResolutionImageReader target : mPrivMultiResTargets) {
294                 target.close();
295             }
296             for (MultiResolutionImageReader target : mJpegMultiResTargets) {
297                 target.close();
298             }
299             for (MultiResolutionImageReader target : mYuvMultiResTargets) {
300                 target.close();
301             }
302             for (MultiResolutionImageReader target : mRawMultiResTargets) {
303                 target.close();
304             }
305         }
306     }

    private static void configureTarget(StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces,
            int format, Size targetSize, int numBuffers, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig,
            boolean createMultiResStreamConfig, ImageDropperListener listener, Handler handler,
            long dynamicRangeProfile, long streamUseCase) {
        if (createMultiResStreamConfig) {
            Collection<MultiResolutionStreamInfo> multiResolutionStreams =
                    multiResStreamConfig.getOutputInfo(format);
            MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
                    multiResolutionStreams, format, numBuffers);
            multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
            Collection<OutputConfiguration> configs =
                    OutputConfiguration.createInstancesForMultiResolutionOutput(multiResReader);
            outputConfigs.addAll(configs);
            outputSurfaces.add(multiResReader.getSurface());
            switch (format) {
                case ImageFormat.PRIVATE:
                    targets.mPrivMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.JPEG:
                    targets.mJpegMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.YUV_420_888:
                    targets.mYuvMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.RAW_SENSOR:
                    targets.mRawMultiResTargets.add(multiResReader);
                    break;
                default:
                    fail("Unknown/Unsupported output format " + format);
            }
        } else {
            if (format == ImageFormat.PRIVATE) {
                SurfaceTexture target = new SurfaceTexture(/*random int*/1);
                target.setDefaultBufferSize(targetSize.getWidth(), targetSize.getHeight());
                OutputConfiguration config = new OutputConfiguration(new Surface(target));
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                config.setDynamicRangeProfile(dynamicRangeProfile);
                config.setStreamUseCase(streamUseCase);
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());
                targets.mPrivTargets.add(target);
            } else {
                ImageReader target = ImageReader.newInstance(targetSize.getWidth(),
                        targetSize.getHeight(), format, numBuffers);
                target.setOnImageAvailableListener(listener, handler);
                OutputConfiguration config = new OutputConfiguration(target.getSurface());
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                config.setDynamicRangeProfile(dynamicRangeProfile);
                config.setStreamUseCase(streamUseCase);
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());

                switch (format) {
                    case ImageFormat.JPEG:
                        targets.mJpegTargets.add(target);
                        break;
                    case ImageFormat.YUV_420_888:
                        targets.mYuvTargets.add(target);
                        break;
                    case ImageFormat.Y8:
                        targets.mY8Targets.add(target);
                        break;
                    case ImageFormat.RAW_SENSOR:
                        targets.mRawTargets.add(target);
                        break;
                    case ImageFormat.HEIC:
                        targets.mHeicTargets.add(target);
                        break;
                    case ImageFormat.DEPTH16:
                        targets.mDepth16Targets.add(target);
                        break;
                    case ImageFormat.YCBCR_P010:
                        targets.mP010Targets.add(target);
                        break;
                    default:
                        fail("Unknown/Unsupported output format " + format);
                }
            }
        }
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
        List<Surface> uhSurfaces = new ArrayList<Surface>();
        setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
            numBuffers, substituteY8, substituteHeic, overridePhysicalCameraId,
            multiResStreamConfig, handler);
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
        setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
                numBuffers, substituteY8, substituteHeic, overridePhysicalCameraId,
                multiResStreamConfig, handler, /*dynamicRangeProfiles*/ null);
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler,
            List<Long> dynamicRangeProfiles) {

        Random rnd = new Random();
        // 10-bit capable output streams use a fixed dynamic range profile when
        // dynamicRangeProfiles.size() == 1, or a randomly chosen one when its size is > 1.
        boolean use10BitRandomProfile = (dynamicRangeProfiles != null) &&
                (dynamicRangeProfiles.size() > 1);
        if (use10BitRandomProfile) {
            long seed = rnd.nextLong();
            Log.i(TAG, "Random seed used for selecting 10-bit output: " + seed);
            rnd.setSeed(seed);
        }
        ImageDropperListener imageDropperListener = new ImageDropperListener();
        List<Surface> chosenSurfaces;
        for (MandatoryStreamInformation streamInfo : streamsInfo) {
            if (streamInfo.isInput()) {
                continue;
            }
            chosenSurfaces = outputSurfaces;
            if (streamInfo.isUltraHighResolution()) {
                chosenSurfaces = uhSurfaces;
            }
            int format = streamInfo.getFormat();
            if (substituteY8 && (format == ImageFormat.YUV_420_888)) {
                format = ImageFormat.Y8;
            } else if (substituteHeic && (format == ImageFormat.JPEG)) {
                format = ImageFormat.HEIC;
            }

            long dynamicRangeProfile = DynamicRangeProfiles.STANDARD;
            if (streamInfo.is10BitCapable() && use10BitRandomProfile) {
                boolean override10bit = rnd.nextBoolean();
                if (!override10bit) {
                    dynamicRangeProfile = dynamicRangeProfiles.get(rnd.nextInt(
                            dynamicRangeProfiles.size()));
                    format = streamInfo.get10BitFormat();
                }
            } else if (streamInfo.is10BitCapable() && (dynamicRangeProfiles != null)) {
                dynamicRangeProfile = dynamicRangeProfiles.get(0);
                format = streamInfo.get10BitFormat();
            }
            Size[] availableSizes = new Size[streamInfo.getAvailableSizes().size()];
            availableSizes = streamInfo.getAvailableSizes().toArray(availableSizes);
            Size targetSize = CameraTestUtils.getMaxSize(availableSizes);
            boolean createMultiResReader =
                    (multiResStreamConfig != null &&
                     !multiResStreamConfig.getOutputInfo(format).isEmpty() &&
                     streamInfo.isMaximumSize());
            switch (format) {
                case ImageFormat.PRIVATE:
                case ImageFormat.JPEG:
                case ImageFormat.YUV_420_888:
                case ImageFormat.YCBCR_P010:
                case ImageFormat.Y8:
                case ImageFormat.HEIC:
                case ImageFormat.DEPTH16:
                {
                    configureTarget(targets, outputConfigs, chosenSurfaces, format,
                            targetSize, numBuffers, overridePhysicalCameraId, multiResStreamConfig,
                            createMultiResReader, imageDropperListener, handler,
                            dynamicRangeProfile, streamInfo.getStreamUseCase());
                    break;
                }
                case ImageFormat.RAW_SENSOR: {
                    // targetSize could be null in the logical camera case where only a
                    // physical camera supports the RAW stream.
                    if (targetSize != null) {
                        configureTarget(targets, outputConfigs, chosenSurfaces, format,
                                targetSize, numBuffers, overridePhysicalCameraId,
                                multiResStreamConfig, createMultiResReader, imageDropperListener,
                                handler, dynamicRangeProfile, streamInfo.getStreamUseCase());
                    }
                    break;
                }
                default:
                    fail("Unknown output format " + format);
            }
        }
    }
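
    // Illustrative call sketch for a mandatory stream combination test (hypothetical locals:
    // 'combination' is a MandatoryStreamCombination and 'handler' a background Handler).
    //
    //     StreamCombinationTargets targets = new StreamCombinationTargets();
    //     List<OutputConfiguration> outputConfigs = new ArrayList<>();
    //     List<Surface> outputSurfaces = new ArrayList<>();
    //     setupConfigurationTargets(combination.getStreamsInformation(), targets, outputConfigs,
    //             outputSurfaces, MAX_READER_IMAGES, /*substituteY8*/ false,
    //             /*substituteHeic*/ false, /*overridePhysicalCameraId*/ null,
    //             /*multiResStreamConfig*/ null, handler);
    //     // ... create a session with outputConfigs and submit requests ...
    //     targets.close();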

    /**
     * Close pending images and clean up an {@link android.media.ImageReader} object.
     * @param reader an {@link android.media.ImageReader} to close.
     */
    public static void closeImageReader(ImageReader reader) {
        if (reader != null) {
            reader.close();
        }
    }

    /**
     * Close the pending images and then close the given {@link ImageReader} objects.
     */
    public static void closeImageReaders(ImageReader[] readers) {
        if ((readers != null) && (readers.length > 0)) {
            for (ImageReader reader : readers) {
                CameraTestUtils.closeImageReader(reader);
            }
        }
    }

    /**
     * Close pending images and clean up an {@link android.media.ImageWriter} object.
     * @param writer an {@link android.media.ImageWriter} to close.
     */
    public static void closeImageWriter(ImageWriter writer) {
        if (writer != null) {
            writer.close();
        }
    }

    /**
     * Placeholder listener that releases the image immediately once it is available.
     *
     * <p>
     * It can be used for the case where we don't care about the image data at all.
     * </p>
     */
    public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    image.close();
                    mImagesDropped++;
                }
            }
        }

        public synchronized int getImageCount() {
            return mImagesDropped;
        }

        public synchronized void resetImageCount() {
            mImagesDropped = 0;
        }

        private int mImagesDropped = 0;
    }
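
    // Illustrative usage sketch: an ImageDropperListener keeps a stream flowing when the test only
    // cares about capture results, not image contents ('previewSize' and 'handler' are assumed).
    //
    //     ImageDropperListener dropper = new ImageDropperListener();
    //     ImageReader reader = makeImageReader(previewSize, ImageFormat.YUV_420_888,
    //             MAX_READER_IMAGES, dropper, handler);
    //     // Images arriving on reader.getSurface() are closed immediately;
    //     // dropper.getImageCount() reports how many were dropped.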

    /**
     * Image listener that releases the image immediately after validating it
     */
    public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
        private Size mSize;
        private int mFormat;
        // Whether the parent ImageReader is valid or not. If the parent ImageReader
        // is destroyed, the acquired Image may become invalid.
        private boolean mReaderIsValid;

        public ImageVerifierListener(Size sz, int format) {
            mSize = sz;
            mFormat = format;
            mReaderIsValid = true;
        }

        public synchronized void onReaderDestroyed() {
            mReaderIsValid = false;
        }

        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    // Should only do some quick validity checks in the callback, as the ImageReader
                    // could be closed asynchronously, which will close all images acquired from
                    // this ImageReader.
                    checkImage(image, mSize.getWidth(), mSize.getHeight(), mFormat);
                    // checkAndroidImageFormat calls into underlying Image object, which could
                    // become invalid if the ImageReader is destroyed.
                    if (mReaderIsValid) {
                        checkAndroidImageFormat(image);
                    }
                    image.close();
                }
            }
        }
    }

    public static class SimpleImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();
        // Indicates whether this listener will drop images when the number of
        // queued images reaches the reader's maxImages.
        private final boolean mAsyncMode;
        // Max number of images held by the queue in async mode.
        private final int mMaxImages;

        /**
         * Create a synchronous SimpleImageReaderListener that queues the images
         * automatically when they are available; no image will be dropped. If
         * the caller doesn't call getImage(), the producer will eventually run
         * into buffer starvation.
         */
        public SimpleImageReaderListener() {
            mAsyncMode = false;
            mMaxImages = 0;
        }

        /**
         * Create a synchronous/asynchronous SimpleImageReaderListener that
         * queues the images automatically when they are available. For an
         * asynchronous listener, images will be dropped once the number of
         * queued images reaches maxImages, so if the caller doesn't call
         * getImage(), the producer will not be blocked. For a synchronous
         * listener, no image will be dropped; if the caller doesn't call
         * getImage(), the producer will eventually run into buffer starvation.
         *
         * @param asyncMode If the listener is operating in asynchronous mode.
         * @param maxImages The max number of images held by this listener.
         */
        public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
            mAsyncMode = asyncMode;
            mMaxImages = maxImages;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                Image image = reader.acquireNextImage();
                if (image == null) {
                    return;
                }
                mQueue.put(image);
                if (mAsyncMode && mQueue.size() >= mMaxImages) {
                    Image img = mQueue.poll();
                    img.close();
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout for the wait, in milliseconds.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }

        /**
         * Drain the images currently pending in this listener.
         */
        public void drain() {
            while (!mQueue.isEmpty()) {
                Image image = mQueue.poll();
                assertNotNull("Unable to get an image", image);
                image.close();
            }
        }
    }
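
    // Illustrative usage sketch of the asynchronous mode (assumed locals: 'reader' and 'handler').
    // In async mode the oldest queued image is dropped once maxImages is reached, so a repeating
    // request never stalls the producer.
    //
    //     SimpleImageReaderListener listener =
    //             new SimpleImageReaderListener(/*asyncMode*/ true, MAX_READER_IMAGES);
    //     reader.setOnImageAvailableListener(listener, handler);
    //     // ... start a repeating capture targeting reader.getSurface() ...
    //     Image image = listener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     image.close();
    //     listener.drain();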

    public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
        private final Semaphore mImageReleasedSema = new Semaphore(0);
        private final ImageWriter mWriter;
        @Override
        public void onImageReleased(ImageWriter writer) {
            if (writer != mWriter) {
                return;
            }

            if (VERBOSE) {
                Log.v(TAG, "Input image is released");
            }
            mImageReleasedSema.release();
        }

        public SimpleImageWriterListener(ImageWriter writer) {
            if (writer == null) {
                throw new IllegalArgumentException("writer cannot be null");
            }
            mWriter = writer;
        }

        public void waitForImageReleased(long timeoutMs) throws InterruptedException {
            if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
                fail("Wait for image released timed out after " + timeoutMs + "ms");
            }
        }
    }
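
    // Illustrative usage sketch: because the constructor needs the writer, the listener is hooked
    // up after creation rather than through makeImageWriter ('session', 'handler' and
    // 'inputImage' are assumptions for the example).
    //
    //     ImageWriter writer = ImageWriter.newInstance(session.getInputSurface(), /*maxImages*/ 2);
    //     SimpleImageWriterListener writerListener = new SimpleImageWriterListener(writer);
    //     writer.setOnImageReleasedListener(writerListener, handler);
    //     writer.queueInputImage(inputImage);
    //     writerListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);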

    public static class ImageAndMultiResStreamInfo {
        public final Image image;
        public final MultiResolutionStreamInfo streamInfo;

        public ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo) {
            this.image = image;
            this.streamInfo = streamInfo;
        }
    }

    public static class SimpleMultiResolutionImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        public SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner,
                int maxBuffers, boolean acquireLatest) {
            mOwner = owner;
            mMaxBuffers = maxBuffers;
            mAcquireLatest = acquireLatest;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (VERBOSE) Log.v(TAG, "new image available from reader " + reader.toString());

            if (mAcquireLatest) {
                synchronized (mLock) {
                    // If the image reader has switched, acquire and release any pending image
                    // from the previous image reader.
                    if (mLastReader != reader) {
                        if (mLastReader != null) {
                            Image image = mLastReader.acquireLatestImage();
                            if (image != null) {
                                image.close();
                            }
                        }
                        mLastReader = reader;
                    }
                }
                mImageAvailable.open();
            } else {
                if (mQueue.size() < mMaxBuffers) {
                    Image image = reader.acquireNextImage();
                    MultiResolutionStreamInfo multiResStreamInfo =
                            mOwner.getStreamInfoForImageReader(reader);
                    mQueue.offer(new ImageAndMultiResStreamInfo(image, multiResStreamInfo));
                }
            }
        }

        public ImageAndMultiResStreamInfo getAnyImageAndInfoAvailable(long timeoutMs)
                throws Exception {
            if (mAcquireLatest) {
                Image image = null;
                if (mImageAvailable.block(timeoutMs)) {
                    synchronized (mLock) {
                        if (mLastReader != null) {
                            image = mLastReader.acquireLatestImage();
                            if (VERBOSE) Log.v(TAG, "acquireLatestImage from "
                                    + mLastReader.toString() + " produces " + image);
                        } else {
                            fail("invalid image reader");
                        }
                    }
                    mImageAvailable.close();
                } else {
                    fail("Wait for image available timed out after " + timeoutMs + "ms");
                }
                return image == null ? null : new ImageAndMultiResStreamInfo(image,
                        mOwner.getStreamInfoForImageReader(mLastReader));
            } else {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll(timeoutMs,
                        java.util.concurrent.TimeUnit.MILLISECONDS);
                if (imageAndInfo == null) {
                    fail("Wait for image available timed out after " + timeoutMs + "ms");
                }
                return imageAndInfo;
            }
        }

        public void reset() {
            while (!mQueue.isEmpty()) {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll();
                assertNotNull("Acquired image is not valid", imageAndInfo.image);
                imageAndInfo.image.close();
            }
            mImageAvailable.close();
            mLastReader = null;
        }

        private LinkedBlockingQueue<ImageAndMultiResStreamInfo> mQueue =
                new LinkedBlockingQueue<ImageAndMultiResStreamInfo>();
        private final MultiResolutionImageReader mOwner;
        private final int mMaxBuffers;
        private final boolean mAcquireLatest;
        private ConditionVariable mImageAvailable = new ConditionVariable();
        private ImageReader mLastReader = null;
        private final Object mLock = new Object();
    }
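
    // Illustrative usage sketch (assumed locals: 'streamInfos' and 'handler'). A
    // MultiResolutionImageReader registers its listener with an Executor rather than a Handler.
    //
    //     MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
    //             streamInfos, ImageFormat.JPEG, MAX_READER_IMAGES);
    //     SimpleMultiResolutionImageReaderListener listener =
    //             new SimpleMultiResolutionImageReaderListener(multiResReader,
    //                     /*maxBuffers*/ 1, /*acquireLatest*/ true);
    //     multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
    //     ImageAndMultiResStreamInfo imageAndInfo =
    //             listener.getAnyImageAndInfoAvailable(CAPTURE_IMAGE_TIMEOUT_MS);
    //     if (imageAndInfo != null) imageAndInfo.image.close();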

    public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
        private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
                new LinkedBlockingQueue<TotalCaptureResult>();
        private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
                new LinkedBlockingQueue<>();
        // (Surface, frame number) pair for lost buffers
        private final LinkedBlockingQueue<Pair<Surface, Long>> mBufferLostQueue =
                new LinkedBlockingQueue<>();
        private final LinkedBlockingQueue<Integer> mAbortQueue =
                new LinkedBlockingQueue<>();
        // Pair<CaptureRequest, Long> is a pair of capture request and start of exposure timestamp.
        private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
                new LinkedBlockingQueue<>();
        // Pair<CaptureRequest, Long> is a pair of capture request and readout timestamp.
        private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mReadoutStartQueue =
                new LinkedBlockingQueue<>();
        // Pair<Int, Long> is a pair of sequence id and frame number
        private final LinkedBlockingQueue<Pair<Integer, Long>> mCaptureSequenceCompletedQueue =
                new LinkedBlockingQueue<>();

        private AtomicLong mNumFramesArrived = new AtomicLong(0);

        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
            try {
                mCaptureStartQueue.put(new Pair(request, timestamp));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureStarted");
            }
        }

        @Override
        public void onReadoutStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
            try {
                mReadoutStartQueue.put(new Pair(request, timestamp));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onReadoutStarted");
            }
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                mNumFramesArrived.incrementAndGet();
                mQueue.put(result);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureCompleted");
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            try {
                mFailureQueue.put(failure);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureFailed");
            }
        }

        @Override
        public void onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId) {
            try {
                mAbortQueue.put(sequenceId);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureSequenceAborted");
            }
        }

        @Override
        public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
                long frameNumber) {
            try {
                mCaptureSequenceCompletedQueue.put(new Pair(sequenceId, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureSequenceCompleted");
            }
        }

        @Override
        public void onCaptureBufferLost(CameraCaptureSession session,
                CaptureRequest request, Surface target, long frameNumber) {
            try {
                mBufferLostQueue.put(new Pair<>(target, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureBufferLost");
            }
        }

        public long getTotalNumFrames() {
            return mNumFramesArrived.get();
        }

        public CaptureResult getCaptureResult(long timeout) {
            return getTotalCaptureResult(timeout);
        }

        public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
            try {
                long currentTs = -1L;
                TotalCaptureResult result;
                while (true) {
                    result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                    if (result == null) {
                        throw new RuntimeException(
                                "Wait for a capture result timed out in " + timeout + "ms");
                    }
                    currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
                    if (currentTs == timestamp) {
                        return result;
                    }
                }

            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public TotalCaptureResult getTotalCaptureResult(long timeout) {
            try {
                TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
                return result;
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        /**
         * Get the {@link CaptureResult capture result} for a given
         * {@link CaptureRequest capture request}.
         *
         * @param myRequest The {@link CaptureRequest capture request} whose
         *            corresponding {@link CaptureResult capture result} is
         *            being waited for.
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            return getTotalCaptureResultForRequest(myRequest, numResultsWait);
        }

        /**
         * Get the {@link TotalCaptureResult total capture result} for a given
         * {@link CaptureRequest capture request}.
         *
         * @param myRequest The {@link CaptureRequest capture request} whose
         *            corresponding {@link TotalCaptureResult capture result} is
         *            being waited for.
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            return getTotalCaptureResultForRequest(myRequest, numResultsWait,
                    CAPTURE_RESULT_TIMEOUT_MS);
        }
998         /**
999          * Get the {@link #TotalCaptureResult total capture result} for a given
1000          * {@link #CaptureRequest capture request}.
1001          *
1002          * @param myRequest The {@link #CaptureRequest capture request} whose
1003          *            corresponding {@link #TotalCaptureResult capture result} was
1004          *            being waited for
1005          * @param numResultsWait Number of frames to wait for the capture result
1006          *            before timeout.
1007          * @param timeoutForResult Timeout to wait for each capture result.
1008          * @throws TimeoutRuntimeException If more than numResultsWait results are
1009          *            seen before the result matching myRequest arrives, or each
1010          *            individual wait for result times out after
1011          *            timeoutForResult ms.
1012          */
getTotalCaptureResultForRequest(CaptureRequest myRequest, int numResultsWait, int timeoutForResult)1013         public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
1014                 int numResultsWait, int timeoutForResult) {
1015             ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
1016             captureRequests.add(myRequest);
1017             return getTotalCaptureResultsForRequests(
1018                     captureRequests, numResultsWait, timeoutForResult)[0];
1019         }
1020 
1021         /**
1022          * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
1023          * {@link #CaptureRequest capture requests}. This can be used when the order of results
1024          * may not the same as the order of requests.
1025          *
1026          * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
1027          *            corresponding {@link #TotalCaptureResult capture results} are
1028          *            being waited for.
1029          * @param numResultsWait Number of frames to wait for the capture results
1030          *            before timeout.
1031          * @throws TimeoutRuntimeException If more than numResultsWait results are
1032          *            seen before all the results matching captureRequests arrives.
1033          */
getTotalCaptureResultsForRequests( List<CaptureRequest> captureRequests, int numResultsWait)1034         public TotalCaptureResult[] getTotalCaptureResultsForRequests(
1035                 List<CaptureRequest> captureRequests, int numResultsWait) {
1036             return getTotalCaptureResultsForRequests(captureRequests, numResultsWait,
1037                     CAPTURE_RESULT_TIMEOUT_MS);
1038         }
1039 
1040         /**
1041          * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
1042          * {@link #CaptureRequest capture requests}. This can be used when the order of results
1043          * may not the same as the order of requests.
1044          *
1045          * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
1046          *            corresponding {@link #TotalCaptureResult capture results} are
1047          *            being waited for.
1048          * @param numResultsWait Number of frames to wait for the capture results
1049          *            before timeout.
1050          * @param timeoutForResult Timeout to wait for each capture result.
1051          * @throws TimeoutRuntimeException If more than numResultsWait results are
1052          *            seen before all the results matching captureRequests arrives.
1053          */
getTotalCaptureResultsForRequests( List<CaptureRequest> captureRequests, int numResultsWait, int timeoutForResult)1054         public TotalCaptureResult[] getTotalCaptureResultsForRequests(
1055                 List<CaptureRequest> captureRequests, int numResultsWait, int timeoutForResult) {
1056             if (numResultsWait < 0) {
1057                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1058             }
1059             if (captureRequests == null || captureRequests.size() == 0) {
1060                 throw new IllegalArgumentException("captureRequests must have at least 1 request.");
1061             }
1062 
1063             // Create a request -> a list of result indices map that it will wait for.
1064             HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
1065             for (int i = 0; i < captureRequests.size(); i++) {
1066                 CaptureRequest request = captureRequests.get(i);
1067                 ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
1068                 if (indices == null) {
1069                     indices = new ArrayList<>();
1070                     remainingResultIndicesMap.put(request, indices);
1071                 }
1072                 indices.add(i);
1073             }
1074 
1075             TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
1076             int i = 0;
1077             do {
1078                 TotalCaptureResult result = getTotalCaptureResult(timeoutForResult);
1079                 CaptureRequest request = result.getRequest();
1080                 ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
1081                 if (indices != null) {
1082                     results[indices.get(0)] = result;
1083                     indices.remove(0);
1084 
1085                     // Remove the entry if all results for this request has been fulfilled.
1086                     if (indices.isEmpty()) {
1087                         remainingResultIndicesMap.remove(request);
1088                     }
1089                 }
1090 
1091                 if (remainingResultIndicesMap.isEmpty()) {
1092                     return results;
1093                 }
1094             } while (i++ < numResultsWait);
1095 
1096             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
1097                     + "waiting for " + numResultsWait + " results");
1098         }
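
        // Usage sketch (illustrative only; "session", "captureListener", "requests" and "handler"
        // are assumed to be set up by the caller): submit each request, then fetch the results
        // index-aligned with the request list, even if the HAL delivers them out of order.
        //
        //     for (CaptureRequest request : requests) {
        //         session.capture(request, captureListener, handler);
        //     }
        //     TotalCaptureResult[] results = captureListener.getTotalCaptureResultsForRequests(
        //             requests, /*numResultsWait*/ 2 * requests.size());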
1099 
1100         /**
1101          * Get an array list of {@link #CaptureFailure capture failures} with maxNumFailures entries
1102          * at most. If it times out before maxNumFailures failures are received, return the failures
1103          * received so far.
1104          *
1105          * @param maxNumFailures The maximal number of failures to return. If it times out before
1106          *                       the maximal number of failures are received, return the received
1107          *                       failures so far.
1108          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1109          */
1110         public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
1111             ArrayList<CaptureFailure> failures = new ArrayList<>();
1112             try {
1113                 for (int i = 0; i < maxNumFailures; i++) {
1114                     CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1115                             TimeUnit.MILLISECONDS);
1116                     if (failure == null) {
1117                         // If waiting on a failure times out, return the failures so far.
1118                         break;
1119                     }
1120                     failures.add(failure);
1121                 }
1122             }  catch (InterruptedException e) {
1123                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1124             }
1125 
1126             return failures;
1127         }
1128 
1129         /**
1130          * Get an array list of lost buffers with maxNumLost entries at most.
1131          * If it times out before maxNumLost buffer lost callbacks are received, return the
1132          * lost callbacks received so far.
1133          *
1134          * @param maxNumLost The maximal number of buffer lost failures to return. If it times out
1135          *                   before the maximal number of failures are received, return the received
1136          *                   buffer lost failures so far.
1137          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1138          */
1139         public ArrayList<Pair<Surface, Long>> getLostBuffers(long maxNumLost) {
1140             ArrayList<Pair<Surface, Long>> failures = new ArrayList<>();
1141             try {
1142                 for (int i = 0; i < maxNumLost; i++) {
1143                     Pair<Surface, Long> failure = mBufferLostQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1144                             TimeUnit.MILLISECONDS);
1145                     if (failure == null) {
1146                         // If waiting on a failure times out, return the failures so far.
1147                         break;
1148                     }
1149                     failures.add(failure);
1150                 }
1151             }  catch (InterruptedException e) {
1152                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1153             }
1154 
1155             return failures;
1156         }
1157 
1158         /**
1159          * Get an array list of aborted capture sequence ids with maxNumAborts entries
1160          * at most. If it times out before maxNumAborts are received, return the aborted sequences
1161          * received so far.
1162          *
1163          * @param maxNumAborts The maximal number of aborted sequences to return. If it times out
1164          *                     before the maximal number of aborts are received, return the received
1165          *                     failed sequences so far.
1166          * @throws UnsupportedOperationException If an error happens while waiting on the failed
1167          *                                       sequences.
1168          */
1169         public ArrayList<Integer> geAbortedSequences(long maxNumAborts) {
1170             ArrayList<Integer> abortList = new ArrayList<>();
1171             try {
1172                 for (int i = 0; i < maxNumAborts; i++) {
1173                     Integer abortSequence = mAbortQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1174                             TimeUnit.MILLISECONDS);
1175                     if (abortSequence == null) {
1176                         break;
1177                     }
1178                     abortList.add(abortSequence);
1179                 }
1180             }  catch (InterruptedException e) {
1181                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1182             }
1183 
1184             return abortList;
1185         }
1186 
1187         /**
1188          * Wait until the capture start callback of a given request and expected timestamp
1189          * arrives, or time out after a given number of capture starts.
1190          *
1191          * @param request The request for the capture start to wait for.
1192          * @param timestamp The timestamp for the capture start to wait for.
1193          * @param numCaptureStartsWait The number of capture start events to wait for before timing
1194          *                             out.
1195          */
1196         public void waitForCaptureStart(CaptureRequest request, Long timestamp,
1197                 int numCaptureStartsWait) throws Exception {
1198             Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);
1199 
1200             int i = 0;
1201             do {
1202                 Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
1203                         CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1204 
1205                 if (shutter == null) {
1206                     throw new TimeoutRuntimeException("Unable to get any more capture start " +
1207                             "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
1208                 } else if (expectedShutter.equals(shutter)) {
1209                     return;
1210                 }
1211 
1212             } while (i++ < numCaptureStartsWait);
1213 
1214             throw new TimeoutRuntimeException("Unable to get the expected capture start " +
1215                     "event after waiting for " + numCaptureStartsWait + " capture starts");
1216         }
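
        // Usage sketch (illustrative; "reprocessRequest" and "expectedTimestamp" are assumed to
        // come from a previously captured result, e.g. its CaptureResult.SENSOR_TIMESTAMP): block
        // until the shutter callback for that exact (request, timestamp) pair is seen, allowing a
        // bounded number of other capture starts to pass by first.
        //
        //     captureListener.waitForCaptureStart(reprocessRequest, expectedTimestamp,
        //             /*numCaptureStartsWait*/ 5);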
1217 
1218         /**
1219          * Wait until it receives the capture sequence completed callback for a given sequence ID.
1220          *
1221          * @param sequenceId The sequence ID of the capture sequence completed callback to wait for.
1222          * @param timeoutMs Time to wait for each capture sequence complete callback before
1223          *                  timing out.
1224          */
1225         public long getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs) {
1226             try {
1227                 while (true) {
1228                     Pair<Integer, Long> completedSequence =
1229                             mCaptureSequenceCompletedQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
1230                     assertNotNull("Wait for a capture sequence completed timed out in " +
1231                             timeoutMs + "ms", completedSequence);
1232 
1233                     if (completedSequence.first.equals(sequenceId)) {
1234                         return completedSequence.second.longValue();
1235                     }
1236                 }
1237             } catch (InterruptedException e) {
1238                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1239             }
1240         }
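
        // Usage sketch (illustrative; "session", "previewRequest", "captureListener" and
        // "handler" are assumed): stop a repeating request, then wait for its sequence-completed
        // callback to learn the last frame number that will be produced.
        //
        //     int sequenceId = session.setRepeatingRequest(previewRequest, captureListener, handler);
        //     session.stopRepeating();
        //     long lastFrameNumber = captureListener.getCaptureSequenceLastFrameNumber(
        //             sequenceId, CAPTURE_RESULT_TIMEOUT_MS);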
1241 
1242         public boolean hasMoreResults()
1243         {
1244             return !mQueue.isEmpty();
1245         }
1246 
1247         public boolean hasMoreFailures()
1248         {
1249             return !mFailureQueue.isEmpty();
1250         }
1251 
1252         public int getNumLostBuffers()
1253         {
1254             return mBufferLostQueue.size();
1255         }
1256 
1257         public boolean hasMoreAbortedSequences()
1258         {
1259             return !mAbortQueue.isEmpty();
1260         }
1261 
1262         public List<Long> getCaptureStartTimestamps(int count) {
1263             List<Long> timestamps = new ArrayList<Long>();
1264             try {
1265                 while (timestamps.size() < count) {
1266                     Pair<CaptureRequest, Long> captureStart = mCaptureStartQueue.poll(
1267                             CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1268                     assertNotNull("Wait for a capture start timed out in "
1269                             + CAPTURE_RESULT_TIMEOUT_MS + "ms", captureStart);
1270 
1271                     timestamps.add(captureStart.second);
1272                 }
1273                 return timestamps;
1274             } catch (InterruptedException e) {
1275                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1276             }
1277         }
1278 
1279         /**
1280          * Get start of readout timestamps
1281          *
1282          * @param count The number of captures
1283          * @return The list of start of readout timestamps
1284          */
1285         public List<Long> getReadoutStartTimestamps(int count) {
1286             List<Long> timestamps = new ArrayList<Long>();
1287             try {
1288                 while (timestamps.size() < count) {
1289                     Pair<CaptureRequest, Long> readoutStart = mReadoutStartQueue.poll(
1290                             CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1291                     assertNotNull("Wait for a readout start timed out in "
1292                             + CAPTURE_RESULT_TIMEOUT_MS + "ms", readoutStart);
1293 
1294                     timestamps.add(readoutStart.second);
1295                 }
1296                 return timestamps;
1297             } catch (InterruptedException e) {
1298                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1299             }
1300         }
1301 
1302         public void drain() {
1303             mQueue.clear();
1304             mNumFramesArrived.getAndSet(0);
1305             mFailureQueue.clear();
1306             mBufferLostQueue.clear();
1307             mCaptureStartQueue.clear();
1308             mReadoutStartQueue.clear();
1309             mAbortQueue.clear();
1310         }
1311     }
1312 
1313     private static class BlockingCameraManager
1314             extends com.android.ex.camera2.blocking.BlockingCameraManager {
1315 
1316         BlockingCameraManager(CameraManager manager) {
1317             super(manager);
1318         }
1319 
1320         public CameraDevice openCamera(String cameraId, boolean overrideToPortrait,
1321                 CameraDevice.StateCallback listener, Handler handler)
1322                 throws CameraAccessException, BlockingOpenException {
1323             if (handler == null) {
1324                 throw new IllegalArgumentException("handler must not be null");
1325             } else if (handler.getLooper() == Looper.myLooper()) {
1326                 throw new IllegalArgumentException(
1327                         "handler's looper must not be the current looper");
1328             }
1329 
1330             return (new OpenListener(mManager, cameraId, overrideToPortrait, listener, handler))
1331                     .blockUntilOpen();
1332         }
1333 
1334         protected class OpenListener
1335                 extends com.android.ex.camera2.blocking.BlockingCameraManager.OpenListener {
1336             OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait,
1337                     CameraDevice.StateCallback listener, Handler handler)
1338                     throws CameraAccessException {
1339                 super(cameraId, listener);
1340                 manager.openCamera(cameraId, overrideToPortrait, handler, this);
1341             }
1342         }
1343     }
1344 
1345     public static boolean hasCapability(CameraCharacteristics characteristics, int capability) {
1346         int [] capabilities =
1347                 characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
1348         for (int c : capabilities) {
1349             if (c == capability) {
1350                 return true;
1351             }
1352         }
1353         return false;
1354     }
1355 
1356     public static boolean isSystemCamera(CameraManager manager, String cameraId)
1357             throws CameraAccessException {
1358         CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
1359         return hasCapability(characteristics,
1360                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA);
1361     }
1362 
1363     public static String[] getCameraIdListForTesting(CameraManager manager,
1364             boolean getSystemCameras)
1365             throws CameraAccessException {
1366         String [] ids = manager.getCameraIdListNoLazy();
1367         List<String> idsForTesting = new ArrayList<String>();
1368         for (String id : ids) {
1369             boolean isSystemCamera = isSystemCamera(manager, id);
1370             if (getSystemCameras == isSystemCamera) {
1371                 idsForTesting.add(id);
1372             }
1373         }
1374         return idsForTesting.toArray(new String[idsForTesting.size()]);
1375     }
1376 
1377     public static Set<Set<String>> getConcurrentCameraIds(CameraManager manager,
1378             boolean getSystemCameras)
1379             throws CameraAccessException {
1380         Set<String> cameraIds = new HashSet<String>(Arrays.asList(getCameraIdListForTesting(manager, getSystemCameras)));
1381         Set<Set<String>> combinations =  manager.getConcurrentCameraIds();
1382         Set<Set<String>> correctComb = new HashSet<Set<String>>();
1383         for (Set<String> comb : combinations) {
1384             Set<String> filteredIds = new HashSet<String>();
1385             for (String id : comb) {
1386                 if (cameraIds.contains(id)) {
1387                     filteredIds.add(id);
1388                 }
1389             }
1390             if (filteredIds.isEmpty()) {
1391                 continue;
1392             }
1393             correctComb.add(filteredIds);
1394         }
1395         return correctComb;
1396     }
1397 
1398     /**
1399      * Block until the camera is opened.
1400      *
1401      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1402      * an AssertionError if it fails to open the camera device.</p>
1403      *
1404      * @return CameraDevice opened camera device
1405      *
1406      * @throws IllegalArgumentException
1407      *            If the handler is null, or if the handler's looper is current.
1408      * @throws CameraAccessException
1409      *            If open fails immediately.
1410      * @throws BlockingOpenException
1411      *            If open fails after blocking for some amount of time.
1412      * @throws TimeoutRuntimeException
1413      *            If opening times out. Typically unrecoverable.
1414      */
1415     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1416             CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
1417             BlockingOpenException {
1418 
1419         /**
1420          * Although camera2 API allows 'null' Handler (it will just use the current
1421          * thread's Looper), this is not what we want for CTS.
1422          *
1423          * In CTS the default looper is used only to process events in between test runs,
1424          * so anything sent there would not be executed inside a test and the test would fail.
1425          *
1426          * In this case, BlockingCameraManager#openCamera performs the check for us.
1427          */
1428         return (new CameraTestUtils.BlockingCameraManager(manager))
1429                 .openCamera(cameraId, listener, handler);
1430     }
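
    // Usage sketch (illustrative; "mCameraManager", "cameraId" and "mCameraListener" are assumed
    // names from a test class): open the camera on a dedicated handler thread so that callbacks
    // are not delivered on the test thread's looper, as required by the check above.
    //
    //     HandlerThread thread = new HandlerThread("CameraTestThread");
    //     thread.start();
    //     Handler handler = new Handler(thread.getLooper());
    //     CameraDevice camera = openCamera(mCameraManager, cameraId, mCameraListener, handler);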
1431 
1432     /**
1433      * Block until the camera is opened.
1434      *
1435      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1436      * an AssertionError if it fails to open the camera device.</p>
1437      *
1438      * @throws IllegalArgumentException
1439      *            If the handler is null, or if the handler's looper is current.
1440      * @throws CameraAccessException
1441      *            If open fails immediately.
1442      * @throws BlockingOpenException
1443      *            If open fails after blocking for some amount of time.
1444      * @throws TimeoutRuntimeException
1445      *            If opening times out. Typically unrecoverable.
1446      */
1447     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1448             boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)
1449             throws CameraAccessException, BlockingOpenException {
1450         return (new CameraTestUtils.BlockingCameraManager(manager))
1451                 .openCamera(cameraId, overrideToPortrait, listener, handler);
1452     }
1453 
1454 
1455     /**
1456      * Block until the camera is opened.
1457      *
1458      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1459      * an AssertionError if it fails to open the camera device.</p>
1460      *
1461      * @throws IllegalArgumentException
1462      *            If the handler is null, or if the handler's looper is current.
1463      * @throws CameraAccessException
1464      *            If open fails immediately.
1465      * @throws BlockingOpenException
1466      *            If open fails after blocking for some amount of time.
1467      * @throws TimeoutRuntimeException
1468      *            If opening times out. Typically unrecoverable.
1469      */
1470     public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
1471             throws CameraAccessException,
1472             BlockingOpenException {
1473         return openCamera(manager, cameraId, /*listener*/null, handler);
1474     }
1475 
1476     /**
1477      * Configure a new camera session with output surfaces and type.
1478      *
1479      * @param camera The CameraDevice to be configured.
1480      * @param outputSurfaces The surface list that is used for camera output.
1481      * @param listener The callback CameraDevice will notify when capture results are available.
1482      */
1483     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1484             List<Surface> outputSurfaces, boolean isHighSpeed,
1485             CameraCaptureSession.StateCallback listener, Handler handler)
1486             throws CameraAccessException {
1487         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1488         if (isHighSpeed) {
1489             camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
1490                     sessionListener, handler);
1491         } else {
1492             camera.createCaptureSession(outputSurfaces, sessionListener, handler);
1493         }
1494         CameraCaptureSession session =
1495                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1496         assertFalse("Camera session should not be a reprocessable session",
1497                 session.isReprocessable());
1498         String sessionType = isHighSpeed ? "High Speed" : "Normal";
1499         assertTrue("Capture session type must be " + sessionType,
1500                 isHighSpeed ==
1501                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(session.getClass()));
1502 
1503         return session;
1504     }
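
    // Usage sketch (illustrative; "camera", "sessionListener", "handler" and the chosen size and
    // format are assumed): configure a normal (non-high-speed) session with a single ImageReader
    // output and block until it is ready.
    //
    //     ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888,
    //             /*maxImages*/ 3);
    //     List<Surface> outputs = Arrays.asList(reader.getSurface());
    //     CameraCaptureSession session = configureCameraSession(camera, outputs,
    //             /*isHighSpeed*/ false, sessionListener, handler);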
1505 
1506     /**
1507      * Build a new constrained camera session with output surfaces, type and recording session
1508      * parameters.
1509      *
1510      * @param camera The CameraDevice to be configured.
1511      * @param outputSurfaces The surface list that is used for camera output.
1512      * @param listener The callback CameraDevice will notify when capture results are available.
1513      * @param initialRequest Initial request settings to use as session parameters.
1514      */
1515     public static CameraCaptureSession buildConstrainedCameraSession(CameraDevice camera,
1516             List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener,
1517             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1518         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1519 
1520         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1521         for (Surface surface : outputSurfaces) {
1522             outConfigurations.add(new OutputConfiguration(surface));
1523         }
1524         SessionConfiguration sessionConfig = new SessionConfiguration(
1525                 SessionConfiguration.SESSION_HIGH_SPEED, outConfigurations,
1526                 new HandlerExecutor(handler), sessionListener);
1527         sessionConfig.setSessionParameters(initialRequest);
1528         camera.createCaptureSession(sessionConfig);
1529 
1530         CameraCaptureSession session =
1531                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1532         assertFalse("Camera session should not be a reprocessable session",
1533                 session.isReprocessable());
1534         assertTrue("Capture session type must be High Speed",
1535                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1536                         session.getClass()));
1537 
1538         return session;
1539     }
1540 
1541     /**
1542      * Configure a new camera session with output configurations.
1543      *
1544      * @param camera The CameraDevice to be configured.
1545      * @param outputs The OutputConfiguration list that is used for camera output.
1546      * @param listener The callback CameraDevice will notify when capture results are available.
1547      */
1548     public static CameraCaptureSession configureCameraSessionWithConfig(CameraDevice camera,
1549             List<OutputConfiguration> outputs,
1550             CameraCaptureSession.StateCallback listener, Handler handler)
1551             throws CameraAccessException {
1552         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1553         camera.createCaptureSessionByOutputConfigurations(outputs, sessionListener, handler);
1554         CameraCaptureSession session =
1555                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1556         assertFalse("Camera session should not be a reprocessable session",
1557                 session.isReprocessable());
1558         return session;
1559     }
1560 
1561     /**
1562      * Configure a new camera session with output configurations / a session color space.
1563      *
1564      * @param camera The CameraDevice to be configured.
1565      * @param outputs The OutputConfiguration list that is used for camera output.
1566      * @param listener The callback CameraDevice will notify when capture results are available.
1567      * @param colorSpace The ColorSpace for this session.
1568      */
1569     public static CameraCaptureSession configureCameraSessionWithColorSpace(CameraDevice camera,
1570             List<OutputConfiguration> outputs,
1571             CameraCaptureSession.StateCallback listener, Handler handler,
1572             ColorSpace.Named colorSpace) throws CameraAccessException {
1573         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1574         SessionConfiguration sessionConfiguration = new SessionConfiguration(
1575                 SessionConfiguration.SESSION_REGULAR, outputs,
1576                 new HandlerExecutor(handler), sessionListener);
1577         sessionConfiguration.setColorSpace(colorSpace);
1578         camera.createCaptureSession(sessionConfiguration);
1579         CameraCaptureSession session =
1580                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1581         assertFalse("Camera session should not be a reprocessable session",
1582                 session.isReprocessable());
1583         return session;
1584     }
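
    // Usage sketch (illustrative; "camera", "outputConfigs", "sessionListener" and "handler" are
    // assumed): request a DISPLAY_P3 session color space when creating the session. Whether the
    // color space is actually honored still depends on the device's advertised capabilities.
    //
    //     CameraCaptureSession session = configureCameraSessionWithColorSpace(camera,
    //             outputConfigs, sessionListener, handler, ColorSpace.Named.DISPLAY_P3);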
1585 
1586     /**
1587      * Try to configure a new camera session with output configurations.
1588      *
1589      * @param camera The CameraDevice to be configured.
1590      * @param outputs The OutputConfiguration list that is used for camera output.
1591      * @param initialRequest The session parameters passed in during stream configuration
1592      * @param listener The callback CameraDevice will notify when capture results are available.
1593      */
1594     public static CameraCaptureSession tryConfigureCameraSessionWithConfig(CameraDevice camera,
1595             List<OutputConfiguration> outputs, CaptureRequest initialRequest,
1596             CameraCaptureSession.StateCallback listener, Handler handler)
1597             throws CameraAccessException {
1598         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1599         SessionConfiguration sessionConfig = new SessionConfiguration(
1600                 SessionConfiguration.SESSION_REGULAR, outputs, new HandlerExecutor(handler),
1601                 sessionListener);
1602         sessionConfig.setSessionParameters(initialRequest);
1603         camera.createCaptureSession(sessionConfig);
1604 
1605         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1606                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1607         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1608                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1609 
1610         CameraCaptureSession session = null;
1611         if (state == BlockingSessionCallback.SESSION_READY) {
1612             session = sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1613             assertFalse("Camera session should not be a reprocessable session",
1614                     session.isReprocessable());
1615         }
1616         return session;
1617     }
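
    // Usage sketch (illustrative; "camera", "outputConfigs", "initialRequest", "sessionListener"
    // and "handler" are assumed): unlike the other configure* helpers above, this one returns
    // null instead of failing the test when the stream combination cannot be configured, so
    // callers should treat null as "combination not supported".
    //
    //     CameraCaptureSession session = tryConfigureCameraSessionWithConfig(camera,
    //             outputConfigs, initialRequest, sessionListener, handler);
    //     if (session == null) {
    //         Log.w(TAG, "Stream combination not supported, skipping");
    //         return;
    //     }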
1618 
1619     /**
1620      * Configure a new camera session with output surfaces and initial session parameters.
1621      *
1622      * @param camera The CameraDevice to be configured.
1623      * @param outputSurfaces The surface list that is used for camera output.
1624      * @param listener The callback CameraDevice will notify when session is available.
1625      * @param handler The handler used to notify callbacks.
1626      * @param initialRequest Initial request settings to use as session parameters.
1627      */
1628     public static CameraCaptureSession configureCameraSessionWithParameters(CameraDevice camera,
1629             List<Surface> outputSurfaces, BlockingSessionCallback listener,
1630             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1631         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1632         for (Surface surface : outputSurfaces) {
1633             outConfigurations.add(new OutputConfiguration(surface));
1634         }
1635         SessionConfiguration sessionConfig = new SessionConfiguration(
1636                 SessionConfiguration.SESSION_REGULAR, outConfigurations,
1637                 new HandlerExecutor(handler), listener);
1638         sessionConfig.setSessionParameters(initialRequest);
1639         camera.createCaptureSession(sessionConfig);
1640 
1641         CameraCaptureSession session = listener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1642         assertFalse("Camera session should not be a reprocessable session",
1643                 session.isReprocessable());
1644         assertFalse("Capture session type must be regular",
1645                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1646                         session.getClass()));
1647 
1648         return session;
1649     }
1650 
1651     /**
1652      * Configure a new camera session with output surfaces.
1653      *
1654      * @param camera The CameraDevice to be configured.
1655      * @param outputSurfaces The surface list that is used for camera output.
1656      * @param listener The callback CameraDevice will notify when capture results are available.
1657      */
1658     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1659             List<Surface> outputSurfaces,
1660             CameraCaptureSession.StateCallback listener, Handler handler)
1661             throws CameraAccessException {
1662 
1663         return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
1664                 listener, handler);
1665     }
1666 
1667     public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
1668             InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
1669             CameraCaptureSession.StateCallback listener, Handler handler)
1670             throws CameraAccessException {
1671         List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
1672         for (Surface surface : outputSurfaces) {
1673             outputConfigs.add(new OutputConfiguration(surface));
1674         }
1675         CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
1676                 camera, inputConfiguration, outputConfigs, listener, handler);
1677 
1678         return session;
1679     }
1680 
1681     public static CameraCaptureSession configureReprocessableCameraSessionWithConfigurations(
1682             CameraDevice camera, InputConfiguration inputConfiguration,
1683             List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback listener,
1684             Handler handler) throws CameraAccessException {
1685         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1686         SessionConfiguration sessionConfig = new SessionConfiguration(
1687                 SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler),
1688                 sessionListener);
1689         sessionConfig.setInputConfiguration(inputConfiguration);
1690         camera.createCaptureSession(sessionConfig);
1691 
1692         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1693                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1694         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1695                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1696 
1697         assertTrue("Creating a reprocessable session failed.",
1698                 state == BlockingSessionCallback.SESSION_READY);
1699         CameraCaptureSession session =
1700                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1701         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1702 
1703         return session;
1704     }
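
    // Usage sketch (illustrative; the size, format and the other arguments are assumed): a
    // reprocessable session additionally needs an InputConfiguration that matches one of the
    // device's supported input stream configurations.
    //
    //     InputConfiguration inputConfig = new InputConfiguration(/*width*/ 1920, /*height*/ 1080,
    //             ImageFormat.YUV_420_888);
    //     CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
    //             camera, inputConfig, outputConfigs, sessionListener, handler);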
1705 
1706     /**
1707      * Create a reprocessable camera session with input and output configurations.
1708      *
1709      * @param camera The CameraDevice to be configured.
1710      * @param inputConfiguration The input configuration used to create this session.
1711      * @param outputs The output configurations used to create this session.
1712      * @param listener The callback CameraDevice will notify when capture results are available.
1713      * @param handler The handler used to notify callbacks.
1714      * @return The session ready to use.
1715      * @throws CameraAccessException
1716      */
1717     public static CameraCaptureSession configureReprocCameraSessionWithConfig(CameraDevice camera,
1718             InputConfiguration inputConfiguration, List<OutputConfiguration> outputs,
1719             CameraCaptureSession.StateCallback listener, Handler handler)
1720             throws CameraAccessException {
1721         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1722         camera.createReprocessableCaptureSessionByConfigurations(inputConfiguration, outputs,
1723                 sessionListener, handler);
1724 
1725         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1726                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1727         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1728                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1729 
1730         assertTrue("Creating a reprocessable session failed.",
1731                 state == BlockingSessionCallback.SESSION_READY);
1732 
1733         CameraCaptureSession session =
1734                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1735         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1736 
1737         return session;
1738     }
1739 
1740     public static <T> void assertArrayNotEmpty(T arr, String message) {
1741         assertTrue(message, arr != null && Array.getLength(arr) > 0);
1742     }
1743 
1744     /**
1745      * Check if the format is a legal YUV format supported by the camera.
1746      */
1747     public static void checkYuvFormat(int format) {
1748         if ((format != ImageFormat.YUV_420_888) &&
1749                 (format != ImageFormat.NV21) &&
1750                 (format != ImageFormat.YV12)) {
1751             fail("Wrong formats: " + format);
1752         }
1753     }
1754 
1755     /**
1756      * Check if image size and format match given size and format.
1757      */
1758     public static void checkImage(Image image, int width, int height, int format) {
1759         checkImage(image, width, height, format, /*colorSpace*/null);
1760     }
1761 
1762     /**
1763      * Check if image size and format match given size and format.
1764      */
1765     public static void checkImage(Image image, int width, int height, int format,
1766             ColorSpace colorSpace) {
1767         // Image reader will wrap YV12/NV21 image by YUV_420_888
1768         if (format == ImageFormat.NV21 || format == ImageFormat.YV12) {
1769             format = ImageFormat.YUV_420_888;
1770         }
1771         assertNotNull("Input image is invalid", image);
1772         assertEquals("Format doesn't match", format, image.getFormat());
1773         assertEquals("Width doesn't match", width, image.getWidth());
1774         assertEquals("Height doesn't match", height, image.getHeight());
1775 
1776         if (colorSpace != null && format != ImageFormat.JPEG && format != ImageFormat.JPEG_R
1777                 && format != ImageFormat.HEIC) {
1778             int dataSpace = image.getDataSpace();
1779             ColorSpace actualColorSpace = ColorSpace.getFromDataSpace(dataSpace);
1780             assertNotNull("getFromDataSpace() returned null for format "
1781                     + format + ", dataSpace " + dataSpace, actualColorSpace);
1782             assertEquals("colorSpace " + actualColorSpace.getId()
1783                     + " does not match expected color space "
1784                     + colorSpace.getId(), colorSpace.getId(), actualColorSpace.getId());
1785         }
1786     }
1787 
1788     /**
1789      * <p>Read data from all planes of an Image into a contiguous unpadded, unpacked
1790      * 1-D linear byte array, such that it can be written to disk, or accessed by
1791      * software conveniently. It supports YUV_420_888/NV21/YV12 and JPEG input
1792      * Image format.</p>
1793      *
1794      * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
1795      * the Y plane data first, followed by U(Cb), V(Cr) planes if there is any
1796      * (xstride = width, ystride = height for chroma and luma components).</p>
1797      *
1798      * <p>For JPEG, it returns a 1-D byte array contains a complete JPEG image.</p>
1799      *
1800      * <p>For YUV P010, it returns a byte array that contains Y plane first, followed
1801      * by the interleaved U(Cb)/V(Cr) plane.</p>
1802      */
1803     public static byte[] getDataFromImage(Image image) {
1804         assertNotNull("Invalid image:", image);
1805         int format = image.getFormat();
1806         int width = image.getWidth();
1807         int height = image.getHeight();
1808         int rowStride, pixelStride;
1809         byte[] data = null;
1810 
1811         // Read image data
1812         Plane[] planes = image.getPlanes();
1813         assertTrue("Fail to get image planes", planes != null && planes.length > 0);
1814 
1815         // Check image validity
1816         checkAndroidImageFormat(image);
1817 
1818         ByteBuffer buffer = null;
1819         // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
1820         // Same goes for DEPTH_POINT_CLOUD, RAW_PRIVATE, DEPTH_JPEG, and HEIC
1821         if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD ||
1822                 format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH_JPEG ||
1823                 format == ImageFormat.HEIC || format == ImageFormat.JPEG_R) {
1824             buffer = planes[0].getBuffer();
1825             assertNotNull("Fail to get jpeg/depth/heic ByteBuffer", buffer);
1826             data = new byte[buffer.remaining()];
1827             buffer.get(data);
1828             buffer.rewind();
1829             return data;
1830         } else if (format == ImageFormat.YCBCR_P010) {
1831             // P010 samples are stored within 16 bit values
1832             int offset = 0;
1833             int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
1834             data = new byte[width * height * bytesPerPixelRounded];
1835             assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length,
1836                     planes.length == 3);
1837             for (int i = 0; i < 2; i++) {
1838                 buffer = planes[i].getBuffer();
1839                 assertNotNull("Fail to get bytebuffer from plane", buffer);
1840                 buffer.rewind();
1841                 rowStride = planes[i].getRowStride();
1842                 if (VERBOSE) {
1843                     Log.v(TAG, "rowStride " + rowStride);
1844                     Log.v(TAG, "width " + width);
1845                     Log.v(TAG, "height " + height);
1846                 }
1847                 int h = (i == 0) ? height : height / 2;
1848                 for (int row = 0; row < h; row++) {
1849                     // Each 10-bit pixel occupies 2 bytes
1850                     int length = 2 * width;
1851                     buffer.get(data, offset, length);
1852                     offset += length;
1853                     if (row < h - 1) {
1854                         buffer.position(buffer.position() + rowStride - length);
1855                     }
1856                 }
1857                 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1858                 buffer.rewind();
1859             }
1860             return data;
1861         }
1862 
1863         int offset = 0;
1864         data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
1865         int maxRowSize = planes[0].getRowStride();
1866         for (int i = 0; i < planes.length; i++) {
1867             if (maxRowSize < planes[i].getRowStride()) {
1868                 maxRowSize = planes[i].getRowStride();
1869             }
1870         }
1871         byte[] rowData = new byte[maxRowSize];
1872         if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
1873         for (int i = 0; i < planes.length; i++) {
1874             buffer = planes[i].getBuffer();
1875             assertNotNull("Fail to get bytebuffer from plane", buffer);
1876             buffer.rewind();
1877             rowStride = planes[i].getRowStride();
1878             pixelStride = planes[i].getPixelStride();
1879             assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
1880             if (VERBOSE) {
1881                 Log.v(TAG, "pixelStride " + pixelStride);
1882                 Log.v(TAG, "rowStride " + rowStride);
1883                 Log.v(TAG, "width " + width);
1884                 Log.v(TAG, "height " + height);
1885             }
1886             // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
1887             int w = (i == 0) ? width : width / 2;
1888             int h = (i == 0) ? height : height / 2;
1889             assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w);
1890             for (int row = 0; row < h; row++) {
1891                 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
1892                 int length;
1893                 if (pixelStride == bytesPerPixel) {
1894                     // Special case: optimized read of the entire row
1895                     length = w * bytesPerPixel;
1896                     buffer.get(data, offset, length);
1897                     offset += length;
1898                 } else {
1899                     // Generic case: should work for any pixelStride but slower.
1900                     // Use intermediate buffer to avoid read byte-by-byte from
1901                     // DirectByteBuffer, which is very bad for performance
1902                     length = (w - 1) * pixelStride + bytesPerPixel;
1903                     buffer.get(rowData, 0, length);
1904                     for (int col = 0; col < w; col++) {
1905                         data[offset++] = rowData[col * pixelStride];
1906                     }
1907                 }
1908                 // Advance buffer the remainder of the row stride
1909                 if (row < h - 1) {
1910                     buffer.position(buffer.position() + rowStride - length);
1911                 }
1912             }
1913             if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1914             buffer.rewind();
1915         }
1916         return data;
1917     }
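
    // Usage sketch (illustrative; "reader" is an assumed ImageReader that has already received a
    // frame, and "debugFileNameBase" is an assumed writable output directory): flatten an
    // acquired Image into a packed byte array and optionally dump it to disk for inspection.
    //
    //     Image image = reader.acquireNextImage();
    //     try {
    //         byte[] yuvData = getDataFromImage(image);
    //         dumpFile(debugFileNameBase + "/preview.yuv", yuvData);
    //     } finally {
    //         image.close();
    //     }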
1918 
1919     /**
1920      * <p>Check Android image format validity for an image; only the formats below are
1921      * supported:</p>
1922      * <p>YUV_420_888/NV21/YV12 (more can be added in the future)</p>
1923      */
1924     public static void checkAndroidImageFormat(Image image) {
1925         int format = image.getFormat();
1926         Plane[] planes = image.getPlanes();
1927         switch (format) {
1928             case ImageFormat.YUV_420_888:
1929             case ImageFormat.NV21:
1930             case ImageFormat.YV12:
1931             case ImageFormat.YCBCR_P010:
1932                 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
1933                 break;
1934             case ImageFormat.JPEG:
1935             case ImageFormat.RAW_SENSOR:
1936             case ImageFormat.RAW_PRIVATE:
1937             case ImageFormat.DEPTH16:
1938             case ImageFormat.DEPTH_POINT_CLOUD:
1939             case ImageFormat.DEPTH_JPEG:
1940             case ImageFormat.Y8:
1941             case ImageFormat.HEIC:
1942             case ImageFormat.JPEG_R:
1943                 assertEquals("JPEG/RAW/depth/Y8 Images should have one plane", 1, planes.length);
1944                 break;
1945             default:
1946                 fail("Unsupported Image Format: " + format);
1947         }
1948     }
1949 
1950     public static void dumpFile(String fileName, Bitmap data) {
1951         FileOutputStream outStream;
1952         try {
1953             Log.v(TAG, "output will be saved as " + fileName);
1954             outStream = new FileOutputStream(fileName);
1955         } catch (IOException ioe) {
1956             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1957         }
1958 
1959         try {
1960             data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, outStream);
1961             outStream.close();
1962         } catch (IOException ioe) {
1963             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1964         }
1965     }
1966 
1967     public static void dumpFile(String fileName, byte[] data) {
1968         FileOutputStream outStream;
1969         try {
1970             Log.v(TAG, "output will be saved as " + fileName);
1971             outStream = new FileOutputStream(fileName);
1972         } catch (IOException ioe) {
1973             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1974         }
1975 
1976         try {
1977             outStream.write(data);
1978             outStream.close();
1979         } catch (IOException ioe) {
1980             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1981         }
1982     }
1983 
1984     /**
1985      * Get the available output sizes for the user-defined {@code format}.
1986      *
1987      * <p>Note that implementation-defined/hidden formats are not supported.</p>
1988      */
1989     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
1990             CameraManager cameraManager) throws CameraAccessException {
1991         return getSupportedSizeForFormat(format, cameraId, cameraManager,
1992                 /*maxResolution*/false);
1993     }
1994 
1995     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
1996             CameraManager cameraManager, boolean maxResolution) throws CameraAccessException {
1997         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
1998         assertNotNull("Can't get camera characteristics!", properties);
1999         if (VERBOSE) {
2000             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
2001         }
2002         CameraCharacteristics.Key<StreamConfigurationMap> configMapTag = maxResolution ?
2003                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION :
2004                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
2005         StreamConfigurationMap configMap = properties.get(configMapTag);
2006         if (configMap == null) {
2007             assertTrue("SCALER_STREAM_CONFIGURATION_MAP is null!", maxResolution);
2008             return null;
2009         }
2010 
2011         Size[] availableSizes = configMap.getOutputSizes(format);
2012         if (!maxResolution) {
2013             assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: "
2014                     + format);
2015         }
2016         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
2017         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
2018             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
2019             System.arraycopy(availableSizes, 0, allSizes, 0,
2020                     availableSizes.length);
2021             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
2022                     highResAvailableSizes.length);
2023             availableSizes = allSizes;
2024         }
2025         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
2026         return availableSizes;
2027     }
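
    // Usage sketch (illustrative; "mCameraManager" and "cameraId" are assumed): enumerate all
    // JPEG output sizes, including any high-resolution sizes, for a camera under test.
    //
    //     Size[] jpegSizes = getSupportedSizeForFormat(ImageFormat.JPEG, cameraId, mCameraManager);
    //     for (Size size : jpegSizes) {
    //         Log.v(TAG, "JPEG output size: " + size);
    //     }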
2028 
2029     /**
2030      * Get the available output sizes for the given class.
2031      *
2032      */
2033     public static Size[] getSupportedSizeForClass(Class klass, String cameraId,
2034             CameraManager cameraManager) throws CameraAccessException {
2035         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
2036         assertNotNull("Can't get camera characteristics!", properties);
2037         if (VERBOSE) {
2038             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
2039         }
2040         StreamConfigurationMap configMap =
2041                 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2042         Size[] availableSizes = configMap.getOutputSizes(klass);
2043         assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: "
2044                 + klass);
2045         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
2046         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
2047             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
2048             System.arraycopy(availableSizes, 0, allSizes, 0,
2049                     availableSizes.length);
2050             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
2051                     highResAvailableSizes.length);
2052             availableSizes = allSizes;
2053         }
2054         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
2055         return availableSizes;
2056     }
2057 
2058     /**
2059      * Size comparator that compares sizes by the number of pixels they cover.
2060      *
2061      * <p>If the areas of two sizes are the same, compare the widths.</p>
2062      */
2063     public static class SizeComparator implements Comparator<Size> {
2064         @Override
2065         public int compare(Size lhs, Size rhs) {
2066             return CameraUtils
2067                     .compareSizes(lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight());
2068         }
2069     }
2070 
2071     /**
2072      * Get sorted size list in descending order. Remove the sizes larger than
2073      * the bound. If the bound is null, don't do the size bound filtering.
2074      */
2075     static public List<Size> getSupportedPreviewSizes(String cameraId,
2076             CameraManager cameraManager, Size bound) throws CameraAccessException {
2077 
2078         Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId,
2079                 cameraManager);
2080         assertArrayNotEmpty(rawSizes,
2081                 "Available sizes for SurfaceHolder class should not be empty");
2082         if (VERBOSE) {
2083             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
2084         }
2085 
2086         if (bound == null) {
2087             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
2088         }
2089 
2090         List<Size> sizes = new ArrayList<Size>();
2091         for (Size sz: rawSizes) {
2092             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
2093                 sizes.add(sz);
2094             }
2095         }
2096         return getAscendingOrderSizes(sizes, /*ascending*/false);
2097     }
2098 
2099     /**
2100      * Get a sorted list of sizes from a given size list.
2101      *
2102      * <p>
2103      * Sizes are compared by the area they cover; if the areas are the same, then
2104      * the widths are compared.
2105      * </p>
2106      *
2107      * @param sizeList The input size list to be sorted
2108      * @param ascending True if the order is ascending, otherwise descending order
2109      * @return The ordered list of sizes
2110      */
2111     static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) {
2112         if (sizeList == null) {
2113             throw new IllegalArgumentException("sizeList shouldn't be null");
2114         }
2115 
2116         Comparator<Size> comparator = new SizeComparator();
2117         List<Size> sortedSizes = new ArrayList<Size>();
2118         sortedSizes.addAll(sizeList);
2119         Collections.sort(sortedSizes, comparator);
2120         if (!ascending) {
2121             Collections.reverse(sortedSizes);
2122         }
2123 
2124         return sortedSizes;
2125     }
2126     /**
2127      * Get sorted (descending order) size list for given format. Remove the sizes larger than
2128      * the bound. If the bound is null, don't do the size bound filtering.
2129      */
2130     static public List<Size> getSortedSizesForFormat(String cameraId,
2131             CameraManager cameraManager, int format, Size bound) throws CameraAccessException {
2132         return getSortedSizesForFormat(cameraId, cameraManager, format, /*maxResolution*/false,
2133                 bound);
2134     }
2135 
2136     /**
2137      * Get sorted (descending order) size list for given format (with an option to get sizes from
2138      * the maximum resolution stream configuration map). Remove the sizes larger than
2139      * the bound. If the bound is null, don't do the size bound filtering.
2140      */
2141     static public List<Size> getSortedSizesForFormat(String cameraId,
2142             CameraManager cameraManager, int format, boolean maxResolution, Size bound)
2143             throws CameraAccessException {
2144         Comparator<Size> comparator = new SizeComparator();
2145         Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager, maxResolution);
2146         List<Size> sortedSizes = null;
2147         if (bound != null) {
2148             sortedSizes = new ArrayList<Size>(/*capacity*/1);
2149             for (Size sz : sizes) {
2150                 if (comparator.compare(sz, bound) <= 0) {
2151                     sortedSizes.add(sz);
2152                 }
2153             }
2154         } else {
2155             sortedSizes = Arrays.asList(sizes);
2156         }
2157         assertTrue("Supported size list should have at least one element",
2158                 sortedSizes.size() > 0);
2159 
2160         Collections.sort(sortedSizes, comparator);
2161         // Make it in descending order.
2162         Collections.reverse(sortedSizes);
2163         return sortedSizes;
2164     }
2165 
2166     /**
2167      * Get supported video size list for a given camera device.
2168      *
2169      * <p>
2170      * Filter out the sizes that are larger than the bound. If the bound is
2171      * null, don't do the size bound filtering.
2172      * </p>
2173      */
2174     static public List<Size> getSupportedVideoSizes(String cameraId,
2175             CameraManager cameraManager, Size bound) throws CameraAccessException {
2176 
2177         Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class,
2178                 cameraId, cameraManager);
2179         assertArrayNotEmpty(rawSizes,
2180                 "Available sizes for MediaRecorder class should not be empty");
2181         if (VERBOSE) {
2182             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
2183         }
2184 
2185         if (bound == null) {
2186             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
2187         }
2188 
2189         List<Size> sizes = new ArrayList<Size>();
2190         for (Size sz: rawSizes) {
2191             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
2192                 sizes.add(sz);
2193             }
2194         }
2195         return getAscendingOrderSizes(sizes, /*ascending*/false);
2196     }
2197 
2198     /**
2199      * Get supported still capture size list (descending order) for a given camera device.
2200      *
2201      * <p>
2202      * Filter out the sizes that are larger than the bound. If the bound is
2203      * null, don't do the size bound filtering.
2204      * </p>
2205      */
2206     static public List<Size> getSupportedStillSizes(String cameraId,
2207             CameraManager cameraManager, Size bound) throws CameraAccessException {
2208         return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound);
2209     }
2210 
2211     static public List<Size> getSupportedHeicSizes(String cameraId,
2212             CameraManager cameraManager, Size bound) throws CameraAccessException {
2213         return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.HEIC, bound);
2214     }
2215 
2216     static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager)
2217             throws CameraAccessException {
2218         List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null);
2219         return sizes.get(sizes.size() - 1);
2220     }
2221 
2222     /**
2223      * Get max supported preview size for a camera device.
2224      */
2225     static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager)
2226             throws CameraAccessException {
2227         return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null);
2228     }
2229 
2230     /**
2231      * Get max preview size for a camera device in the supported sizes that are no larger
2232      * than the bound.
2233      */
2234     static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)
2235             throws CameraAccessException {
2236         List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound);
2237         return sizes.get(0);
2238     }
2239 
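    /**
     * Illustrative usage sketch: pick the largest supported preview size that fits within a
     * hypothetical 1080p display bound, given a valid camera id and an initialized
     * CameraManager. The bound and method name are hypothetical examples.
     */
    private static Size exampleChooseBoundedPreviewSize(String cameraId,
            CameraManager cameraManager) throws CameraAccessException {
        Size displayBound = new Size(1920, 1080); // hypothetical display resolution
        return getMaxPreviewSize(cameraId, cameraManager, displayBound);
    }
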
2240     /**
2241      * Get max depth size for a camera device.
2242      */
2243     static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager)
2244             throws CameraAccessException {
2245         List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16,
2246                 /*bound*/ null);
2247         return sizes.get(0);
2248     }
2249 
2250     /**
2251      * Return the smaller of the two sizes, compared by area.
2252      * @param a first size
2253      *
2254      * @param b second size
2255      *
2256      * @return Size the smaller size
2257      *
2258      * @throws IllegalArgumentException if either param was null.
2259      *
2260      */
2261     @NonNull public static Size getMinSize(Size a, Size b) {
2262         if (a == null || b == null) {
2263             throw new IllegalArgumentException("sizes must not be null");
2264         }
2265         if (a.getWidth() * a.getHeight() < b.getHeight() * b.getWidth()) {
2266             return a;
2267         }
2268         return b;
2269     }
2270 
2271     /**
2272      * Get the largest size by area.
2273      *
2274      * @param sizes an array of sizes, must have at least 1 element
2275      *
2276      * @return Largest Size
2277      *
2278      * @throws IllegalArgumentException if sizes was null or had 0 elements
2279      */
2280     public static Size getMaxSize(Size... sizes) {
2281         return getMaxSize(sizes, -1 /*aspectRatio*/);
2282     }
2283 
2284     /**
2285      * Get the largest size by area, and with given aspect ratio.
2286      *
2287      * @param sizes an array of sizes, must have at least 1 element
2288      * @param aspectRatio the aspect ratio to match. -1 if aspect ratio doesn't need to match.
2289      *
2290      * @return Largest Size. Null if no such size exists matching aspect ratio.
2291      *
2292      * @throws IllegalArgumentException if sizes was null or had 0 elements
2293      */
2294     public static Size getMaxSize(Size[] sizes, float aspectRatio) {
2295         if (sizes == null || sizes.length == 0) {
2296             throw new IllegalArgumentException("sizes was empty");
2297         }
2298 
2299         Size sz = null;
2300         for (Size size : sizes) {
2301             float ar = 1.0f * size.getWidth() / size.getHeight();
2302             if (aspectRatio > 0 && Math.abs(ar - aspectRatio) > ASPECT_RATIO_MATCH_THRESHOLD) {
2303                 continue;
2304             }
2305 
2306             if (sz == null
2307                     || size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
2308                 sz = size;
2309             }
2310         }
2311         return sz;
2312     }
2313 
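    /**
     * Illustrative usage sketch: find the largest 16:9 size in a caller-provided array;
     * returns null if nothing matches within ASPECT_RATIO_MATCH_THRESHOLD. The method name
     * is a hypothetical example.
     */
    private static Size exampleLargest16x9Size(Size[] availableSizes) {
        return getMaxSize(availableSizes, 16.0f / 9.0f);
    }
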
2314     /**
2315      * Get the largest size by area within (less than) bound
2316      *
2317      * @param sizes an array of sizes, must have at least 1 element
2318      *
2319      * @return Largest Size. Null if no such size exists within bound.
2320      *
2321      * @throws IllegalArgumentException if sizes was null or had 0 elements, or bound is invalid.
2322      */
2323     public static Size getMaxSizeWithBound(Size[] sizes, int bound) {
2324         if (sizes == null || sizes.length == 0) {
2325             throw new IllegalArgumentException("sizes was empty");
2326         }
2327         if (bound <= 0) {
2328             throw new IllegalArgumentException("bound is invalid");
2329         }
2330 
2331         Size sz = null;
2332         for (Size size : sizes) {
2333             if (size.getWidth() * size.getHeight() >= bound) {
2334                 continue;
2335             }
2336 
2337             if (sz == null ||
2338                     size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
2339                 sz = size;
2340             }
2341         }
2342 
2343         return sz;
2344     }
2345 
2346     /**
2347      * Get the maximum size in the list that is equal to or smaller than the bound.
2348      *
2349      * Returns null if no size is smaller than or equal to the bound.
2350      */
2351     private static Size getMaxSizeWithBound(Size[] sizes, Size bound) {
2352         return getMaxSizeWithBound(sizes, bound, -1 /*aspectRatio*/);
2353     }
2354 
2355     /**
2356      * Get the maximum size in the list that is equal to or smaller than the bound and
2357      * matches the aspect ratio.
2358      *
2359      * Returns null if no size is smaller than or equal to the bound while matching aspect
2360      * ratio.
2361      */
2362     private static Size getMaxSizeWithBound(Size[] sizes, Size bound, float aspectRatio) {
2363         if (sizes == null || sizes.length == 0) {
2364             throw new IllegalArgumentException("sizes was empty");
2365         }
2366 
2367         Size sz = null;
2368         for (Size size : sizes) {
2369             // If matching aspect ratio is needed, check aspect ratio
2370             float ar = 1.0f * size.getWidth() / size.getHeight();
2371             if (aspectRatio > 0 && Math.abs(ar - aspectRatio) > ASPECT_RATIO_MATCH_THRESHOLD) {
2372                 continue;
2373             }
2374 
2375             if (size.getWidth() <= bound.getWidth() && size.getHeight() <= bound.getHeight()) {
2376 
2377                 if (sz == null) {
2378                     sz = size;
2379                 } else {
2380                     long curArea = sz.getWidth() * (long) sz.getHeight();
2381                     long newArea = size.getWidth() * (long) size.getHeight();
2382                     if (newArea > curArea) {
2383                         sz = size;
2384                     }
2385                 }
2386             }
2387         }
2388 
2389         assertTrue("No size under bound found: " + Arrays.toString(sizes) + " bound " + bound,
2390                 sz != null);
2391 
2392         return sz;
2393     }
2394 
2395     /**
2396      * Returns true if the given {@code array} contains the given element.
2397      *
2398      * @param array {@code array} to check for {@code elem}
2399      * @param elem {@code elem} to test for
2400      * @return {@code true} if the given element is contained
2401      */
2402     public static boolean contains(int[] array, int elem) {
2403         if (array == null) return false;
2404         for (int i = 0; i < array.length; i++) {
2405             if (elem == array[i]) return true;
2406         }
2407         return false;
2408     }
2409 
2410     public static boolean contains(long[] array, long elem) {
2411         if (array == null) return false;
2412         for (int i = 0; i < array.length; i++) {
2413             if (elem == array[i]) return true;
2414         }
2415         return false;
2416     }
2417 
2418     /**
2419      * Get object array from byte array.
2420      *
2421      * @param array Input byte array to be converted
2422      * @return Byte object array converted from input byte array
2423      */
2424     public static Byte[] toObject(byte[] array) {
2425         return convertPrimitiveArrayToObjectArray(array, Byte.class);
2426     }
2427 
2428     /**
2429      * Get object array from int array.
2430      *
2431      * @param array Input int array to be converted
2432      * @return Integer object array converted from input int array
2433      */
2434     public static Integer[] toObject(int[] array) {
2435         return convertPrimitiveArrayToObjectArray(array, Integer.class);
2436     }
2437 
2438     /**
2439      * Get object array from float array.
2440      *
2441      * @param array Input float array to be converted
2442      * @return Float object array converted from input float array
2443      */
2444     public static Float[] toObject(float[] array) {
2445         return convertPrimitiveArrayToObjectArray(array, Float.class);
2446     }
2447 
2448     /**
2449      * Get object array from double array.
2450      *
2451      * @param array Input double array to be converted
2452      * @return Double object array converted from input double array
2453      */
2454     public static Double[] toObject(double[] array) {
2455         return convertPrimitiveArrayToObjectArray(array, Double.class);
2456     }
2457 
2458     /**
2459      * Convert a primitive input array into its object array version (e.g. from int[] to Integer[]).
2460      *
2461      * @param array Input array object
2462      * @param wrapperClass The boxed class it converts to
2463      * @return Boxed version of primitive array
2464      */
2465     private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array,
2466             final Class<T> wrapperClass) {
2467         // getLength does the null check and isArray check already.
2468         int arrayLength = Array.getLength(array);
2469         if (arrayLength == 0) {
2470             throw new IllegalArgumentException("Input array shouldn't be empty");
2471         }
2472 
2473         @SuppressWarnings("unchecked")
2474         final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength);
2475         for (int i = 0; i < arrayLength; i++) {
2476             Array.set(result, i, Array.get(array, i));
2477         }
2478         return result;
2479     }
2480 
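    /**
     * Illustrative usage sketch: box a primitive int array with {@link #toObject(int[])} so
     * it can be used with collection-based helpers. The method name is a hypothetical
     * example.
     */
    private static boolean exampleContainsBoxed(int[] values, int value) {
        Integer[] boxed = toObject(values);
        return Arrays.asList(boxed).contains(value);
    }
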
2481     /**
2482      * Update one 3A region in the capture request builder if that region is supported. Do
2483      * nothing if the specified 3A region is not supported by the camera device.
2484      * @param requestBuilder The request to be updated
2485      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2486      * @param regions The 3A regions to be set
2487      * @param staticInfo static metadata characteristics
2488      */
2489     public static void update3aRegion(
2490             CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions,
2491             StaticMetadata staticInfo)
2492     {
2493         int maxRegions;
2494         CaptureRequest.Key<MeteringRectangle[]> key;
2495 
2496         if (regions == null || regions.length == 0 || staticInfo == null) {
2497             throw new IllegalArgumentException("Invalid input 3A region!");
2498         }
2499 
2500         switch (algoIdx) {
2501             case INDEX_ALGORITHM_AE:
2502                 maxRegions = staticInfo.getAeMaxRegionsChecked();
2503                 key = CaptureRequest.CONTROL_AE_REGIONS;
2504                 break;
2505             case INDEX_ALGORITHM_AWB:
2506                 maxRegions = staticInfo.getAwbMaxRegionsChecked();
2507                 key = CaptureRequest.CONTROL_AWB_REGIONS;
2508                 break;
2509             case INDEX_ALGORITHM_AF:
2510                 maxRegions = staticInfo.getAfMaxRegionsChecked();
2511                 key = CaptureRequest.CONTROL_AF_REGIONS;
2512                 break;
2513             default:
2514                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2515         }
2516 
2517         if (maxRegions >= regions.length) {
2518             requestBuilder.set(key, regions);
2519         }
2520     }
2521 
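    /**
     * Illustrative usage sketch: request a single full-weight AE metering region covering
     * the center quarter of a caller-provided active array rectangle. The region geometry
     * and method name are hypothetical examples; the request is left unchanged if the
     * device reports zero supported AE regions.
     */
    private static void exampleSetCenterAeRegion(CaptureRequest.Builder requestBuilder,
            Rect activeArray, StaticMetadata staticInfo) {
        Rect centerRect = new Rect(activeArray.width() / 4, activeArray.height() / 4,
                activeArray.width() * 3 / 4, activeArray.height() * 3 / 4);
        MeteringRectangle[] regions = new MeteringRectangle[] {
                new MeteringRectangle(centerRect, MeteringRectangle.METERING_WEIGHT_MAX)};
        update3aRegion(requestBuilder, INDEX_ALGORITHM_AE, regions, staticInfo);
    }
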
2522     /**
2523      * Validate that one 3A region in the capture result equals the expected region if that
2524      * region is supported. Do nothing if the region is not supported by the camera device.
2525      * @param result The capture result to be validated
2526      * @param partialResults The partial results to be validated
2527      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2528      * @param expectRegions The 3A regions expected in capture result
2529      * @param scaleByZoomRatio whether to scale the error threshold by zoom ratio
2530      * @param staticInfo static metadata characteristics
2531      */
2532     public static void validate3aRegion(
2533             CaptureResult result, List<CaptureResult> partialResults, int algoIdx,
2534             MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo)
2535     {
2536         // There are multiple cases where the result 3A region could be slightly different from
2537         // the request:
2538         // 1. Distortion correction,
2539         // 2. Adding a smaller 3A region in the test exposes that existing devices' offset can
2540         //    be larger than 1.
2541         // 3. Precision loss due to converting to HAL zoom ratio and back
2542         // 4. Error magnification due to active array scale-up when zoom ratio API is used.
2543         //
2544         // To handle all these scenarios, make the threshold larger, and scale the threshold based
2545         // on zoom ratio. The scaling factor should be relatively tight, and shouldn't be smaller
2546         // than 1x.
2547         final int maxCoordOffset = 5;
2548         int maxRegions;
2549         CaptureResult.Key<MeteringRectangle[]> key;
2550         MeteringRectangle[] actualRegion;
2551 
2552         switch (algoIdx) {
2553             case INDEX_ALGORITHM_AE:
2554                 maxRegions = staticInfo.getAeMaxRegionsChecked();
2555                 key = CaptureResult.CONTROL_AE_REGIONS;
2556                 break;
2557             case INDEX_ALGORITHM_AWB:
2558                 maxRegions = staticInfo.getAwbMaxRegionsChecked();
2559                 key = CaptureResult.CONTROL_AWB_REGIONS;
2560                 break;
2561             case INDEX_ALGORITHM_AF:
2562                 maxRegions = staticInfo.getAfMaxRegionsChecked();
2563                 key = CaptureResult.CONTROL_AF_REGIONS;
2564                 break;
2565             default:
2566                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2567         }
2568 
2569         int maxDist = maxCoordOffset;
2570         if (scaleByZoomRatio) {
2571             Float zoomRatio = result.get(CaptureResult.CONTROL_ZOOM_RATIO);
2572             for (CaptureResult partialResult : partialResults) {
2573                 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
2574                 if (zoomRatioInPartial != null) {
2575                     assertEquals("CONTROL_ZOOM_RATIO in partial result must match"
2576                             + " that in final result", zoomRatio, zoomRatioInPartial);
2577                 }
2578             }
2579             maxDist = (int)Math.ceil(maxDist * Math.max(zoomRatio / 2, 1.0f));
2580         }
2581 
2582         if (maxRegions > 0)
2583         {
2584             actualRegion = getValueNotNull(result, key);
2585             for (CaptureResult partialResult : partialResults) {
2586                 MeteringRectangle[] actualRegionInPartial = partialResult.get(key);
2587                 if (actualRegionInPartial != null) {
2588                     assertEquals("Key " + key.getName() + " in partial result must match"
2589                             + " that in final result", actualRegionInPartial, actualRegion);
2590                 }
2591             }
2592 
2593             for (int i = 0; i < actualRegion.length; i++) {
2594                 // If the expected region's metering weight is 0, allow the camera device
2595                 // to override it.
2596                 if (expectRegions[i].getMeteringWeight() == 0) {
2597                     continue;
2598                 }
2599 
2600                 Rect a = actualRegion[i].getRect();
2601                 Rect e = expectRegions[i].getRect();
2602 
2603                 if (VERBOSE) {
2604                     Log.v(TAG, "Actual region " + actualRegion[i].toString() +
2605                             ", expected region " + expectRegions[i].toString() +
2606                             ", maxDist " + maxDist);
2607                 }
2608                 assertTrue(
2609                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2610                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2611                     maxDist >= Math.abs(a.left - e.left));
2612 
2613                 assertTrue(
2614                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2615                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2616                     maxDist >= Math.abs(a.right - e.right));
2617 
2618                 assertTrue(
2619                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2620                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2621                     maxDist >= Math.abs(a.top - e.top));
2622                 assertTrue(
2623                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2624                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2625                     maxDist >= Math.abs(a.bottom - e.bottom));
2626             }
2627         }
2628     }
2629 
2630     public static void validateImage(Image image, int width, int height, int format,
2631             String filePath) {
2632         validateImage(image, width, height, format, filePath, /*colorSpace*/ null);
2633     }
2634 
2635 
2636     /**
2637      * Validate image based on format and size.
2638      *
2639      * @param image The image to be validated.
2640      * @param width The image width.
2641      * @param height The image height.
2642      * @param format The image format.
2643      * @param filePath The debug dump file path, or null if dumping to a file
2644      *            is not wanted.
2645      * @param colorSpace The expected color space of the image, if desired (null otherwise).
2646      * @throws UnsupportedOperationException if calling with an unknown format
2647      */
2648     public static void validateImage(Image image, int width, int height, int format,
2649             String filePath, ColorSpace colorSpace) {
2650         checkImage(image, width, height, format, colorSpace);
2651 
2652         if (format == ImageFormat.PRIVATE) {
2653             return;
2654         }
2655 
2656         /**
2657          * TODO: validate timestamp:
2658          * 1. capture result timestamp against the image timestamp (need
2659          * consider frame drops)
2660          * 2. timestamps should be monotonically increasing for different requests
2661          */
2662         if (VERBOSE) Log.v(TAG, "Validating image");
2663         byte[] data = getDataFromImage(image);
2664         assertTrue("Invalid image data", data != null && data.length > 0);
2665 
2666         switch (format) {
2667             // Clients must be able to process and handle depth jpeg images like any other
2668             // regular jpeg.
2669             case ImageFormat.DEPTH_JPEG:
2670             case ImageFormat.JPEG:
2671                 validateJpegData(data, width, height, filePath, colorSpace);
2672                 break;
2673             case ImageFormat.JPEG_R:
2674                 validateJpegData(data, width, height, filePath, null /*colorSpace*/,
2675                         true /*gainMapPresent*/);
2676                 break;
2677             case ImageFormat.YCBCR_P010:
2678                 validateP010Data(data, width, height, format, image.getTimestamp(), filePath);
2679                 break;
2680             case ImageFormat.YUV_420_888:
2681             case ImageFormat.YV12:
2682                 validateYuvData(data, width, height, format, image.getTimestamp(), filePath);
2683                 break;
2684             case ImageFormat.RAW_SENSOR:
2685                 validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath);
2686                 break;
2687             case ImageFormat.DEPTH16:
2688                 validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath);
2689                 break;
2690             case ImageFormat.DEPTH_POINT_CLOUD:
2691                 validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath);
2692                 break;
2693             case ImageFormat.RAW_PRIVATE:
2694                 validateRawPrivateData(data, width, height, image.getTimestamp(), filePath);
2695                 break;
2696             case ImageFormat.Y8:
2697                 validateY8Data(data, width, height, format, image.getTimestamp(), filePath);
2698                 break;
2699             case ImageFormat.HEIC:
2700                 validateHeicData(data, width, height, filePath);
2701                 break;
2702             default:
2703                 throw new UnsupportedOperationException("Unsupported format for validation: "
2704                         + format);
2705         }
2706     }
2707 
2708     public static class HandlerExecutor implements Executor {
2709         private final Handler mHandler;
2710 
2711         public HandlerExecutor(Handler handler) {
2712             assertNotNull("handler must be valid", handler);
2713             mHandler = handler;
2714         }
2715 
2716         @Override
2717         public void execute(Runnable runCmd) {
2718             mHandler.post(runCmd);
2719         }
2720     }
2721 
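    /**
     * Illustrative usage sketch: wrap a main-looper Handler in a HandlerExecutor for
     * camera2 APIs that take an Executor instead of a Handler. The method name is a
     * hypothetical example.
     */
    private static Executor exampleMainThreadExecutor() {
        return new HandlerExecutor(new Handler(Looper.getMainLooper()));
    }
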
2722     /**
2723      * Provide a mock for {@link CameraDevice.StateCallback}.
2724      *
2725      * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an
2726      * abstract class.</p>
2727      *
2728      * <p>
2729      * Use this instead of other classes when needing to verify interactions, since
2730      * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra
2731      * interactions which will cause false test failures.
2732      * </p>
2733      *
2734      */
2735     public static class MockStateCallback extends CameraDevice.StateCallback {
2736 
2737         @Override
2738         public void onOpened(CameraDevice camera) {
2739         }
2740 
2741         @Override
2742         public void onDisconnected(CameraDevice camera) {
2743         }
2744 
2745         @Override
2746         public void onError(CameraDevice camera, int error) {
2747         }
2748 
2749         private MockStateCallback() {}
2750 
2751         /**
2752          * Create a Mockito-ready mocked StateCallback.
2753          */
2754         public static MockStateCallback mock() {
2755             return Mockito.spy(new MockStateCallback());
2756         }
2757     }
2758 
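    /**
     * Illustrative usage sketch: verify with Mockito that a mocked state callback received
     * onOpened() for the given camera within a hypothetical timeout. The timeout value and
     * method name are hypothetical examples.
     */
    private static void exampleVerifyCameraOpened(MockStateCallback mockCallback,
            CameraDevice camera) {
        final int openTimeoutMs = 5000; // hypothetical timeout
        verify(mockCallback, timeout(openTimeoutMs)).onOpened(camera);
    }
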
2759     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath) {
2760         validateJpegData(jpegData, width, height, filePath, /*colorSpace*/ null);
2761     }
2762 
2763     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath,
2764             ColorSpace colorSpace) {
2765         validateJpegData(jpegData, width, height, filePath, colorSpace, false /*gainMapPresent*/);
2766     }
2767 
2768     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath,
2769             ColorSpace colorSpace, boolean gainMapPresent) {
2770         BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
2771         // Decode-bounds mode: only parse the frame header to get width/height;
2772         // it doesn't decode the pixels.
2773         bmpOptions.inJustDecodeBounds = true;
2774         BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions);
2775         assertEquals(width, bmpOptions.outWidth);
2776         assertEquals(height, bmpOptions.outHeight);
2777 
2778         // Pixel decoding mode: decode the whole image and check that the image data
2779         // is decodable here.
2780         Bitmap bitmapImage = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
2781         assertNotNull("Decoding jpeg failed", bitmapImage);
2782         if (colorSpace != null) {
2783             ColorSpace bitmapColorSpace = bitmapImage.getColorSpace();
2784             boolean matchingColorSpace = colorSpace.equals(bitmapColorSpace);
2785             if (!matchingColorSpace) {
2786                 Log.e(TAG, "Expected color space:\n\t" + colorSpace);
2787                 Log.e(TAG, "Bitmap color space:\n\t" + bitmapColorSpace);
2788             }
2789             assertTrue("Color space mismatch in decoded jpeg!", matchingColorSpace);
2790         }
2791         if (gainMapPresent) {
2792             Gainmap gainMap = bitmapImage.getGainmap();
2793             assertNotNull(gainMap);
2794             assertNotNull(gainMap.getGainmapContents());
2795         }
2796         if (DEBUG && filePath != null) {
2797             String fileName =
2798                     filePath + "/" + width + "x" + height + ".jpeg";
2799             dumpFile(fileName, jpegData);
2800         }
2801     }
2802 
2803     private static void validateYuvData(byte[] yuvData, int width, int height, int format,
2804             long ts, String filePath) {
2805         checkYuvFormat(format);
2806         if (VERBOSE) Log.v(TAG, "Validating YUV data");
2807         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2808         assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);
2809 
2810         // TODO: Can add data validation for test pattern.
2811 
2812         if (DEBUG && filePath != null) {
2813             String fileName =
2814                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv";
2815             dumpFile(fileName, yuvData);
2816         }
2817     }
2818 
2819     private static void validateP010Data(byte[] p010Data, int width, int height, int format,
2820             long ts, String filePath) {
2821         if (VERBOSE) Log.v(TAG, "Validating P010 data");
2822         // The P010 10 bit samples are stored in two bytes so the size needs to be adjusted
2823         // accordingly.
2824         int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
2825         int expectedSize = width * height * bytesPerPixelRounded;
2826         assertEquals("P010 data doesn't match", expectedSize, p010Data.length);
2827 
2828         if (DEBUG && filePath != null) {
2829             String fileName =
2830                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p010";
2831             dumpFile(fileName, p010Data);
2832         }
2833     }
2834     private static void validateRaw16Data(byte[] rawData, int width, int height, int format,
2835             long ts, String filePath) {
2836         if (VERBOSE) Log.v(TAG, "Validating raw data");
2837         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2838         assertEquals("Raw data doesn't match", expectedSize, rawData.length);
2839 
2840         // TODO: Can add data validation for test pattern.
2841 
2842         if (DEBUG && filePath != null) {
2843             String fileName =
2844                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16";
2845             dumpFile(fileName, rawData);
2846         }
2847 
2848         return;
2849     }
2850 
2851     private static void validateY8Data(byte[] rawData, int width, int height, int format,
2852             long ts, String filePath) {
2853         if (VERBOSE) Log.v(TAG, "Validating Y8 data");
2854         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2855         assertEquals("Y8 data doesn't match", expectedSize, rawData.length);
2856 
2857         // TODO: Can add data validation for test pattern.
2858 
2859         if (DEBUG && filePath != null) {
2860             String fileName =
2861                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".y8";
2862             dumpFile(fileName, rawData);
2863         }
2864 
2865         return;
2866     }
2867 
2868     private static void validateRawPrivateData(byte[] rawData, int width, int height,
2869             long ts, String filePath) {
2870         if (VERBOSE) Log.v(TAG, "Validating private raw data");
2871         // Each opaque RAW pixel is expected to occupy at least one byte and no more than 30 bytes.
2872         int expectedSizeMin = width * height;
2873         int expectedSizeMax = width * height * 30;
2874 
2875         assertTrue("Opaque RAW size " + rawData.length + "out of normal bound [" +
2876                 expectedSizeMin + "," + expectedSizeMax + "]",
2877                 expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax);
2878 
2879         if (DEBUG && filePath != null) {
2880             String fileName =
2881                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv";
2882             dumpFile(fileName, rawData);
2883         }
2884 
2885         return;
2886     }
2887 
2888     private static void validateDepth16Data(byte[] depthData, int width, int height, int format,
2889             long ts, String filePath) {
2890 
2891         if (VERBOSE) Log.v(TAG, "Validating depth16 data");
2892         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2893         assertEquals("Depth data doesn't match", expectedSize, depthData.length);
2894 
2895 
2896         if (DEBUG && filePath != null) {
2897             String fileName =
2898                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16";
2899             dumpFile(fileName, depthData);
2900         }
2901 
2902         return;
2903 
2904     }
2905 
2906     private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int format,
2907             long ts, String filePath) {
2908 
2909         if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");
2910 
2911         // Can't validate size since it is variable
2912 
2913         if (DEBUG && filePath != null) {
2914             String fileName =
2915                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
2916             dumpFile(fileName, depthData);
2917         }
2918 
2919         return;
2920 
2921     }
2922 
2923     private static void validateHeicData(byte[] heicData, int width, int height, String filePath) {
2924         BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
2925         // Decode-bounds mode: only parse the frame header to get width/height;
2926         // it doesn't decode the pixels.
2927         bmpOptions.inJustDecodeBounds = true;
2928         BitmapFactory.decodeByteArray(heicData, 0, heicData.length, bmpOptions);
2929         assertEquals(width, bmpOptions.outWidth);
2930         assertEquals(height, bmpOptions.outHeight);
2931 
2932         // Pixel decoding mode: decode the whole image and check that the image data
2933         // is decodable here.
2934         assertNotNull("Decoding heic failed",
2935                 BitmapFactory.decodeByteArray(heicData, 0, heicData.length));
2936         if (DEBUG && filePath != null) {
2937             String fileName =
2938                     filePath + "/" + width + "x" + height + ".heic";
2939             dumpFile(fileName, heicData);
2940         }
2941     }
2942 
2943     public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
2944         if (result == null) {
2945             throw new IllegalArgumentException("Result must not be null");
2946         }
2947 
2948         T value = result.get(key);
2949         assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value);
2950         return value;
2951     }
2952 
2953     public static <T> T getValueNotNull(CameraCharacteristics characteristics,
2954             CameraCharacteristics.Key<T> key) {
2955         if (characteristics == null) {
2956             throw new IllegalArgumentException("Camera characteristics must not be null");
2957         }
2958 
2959         T value = characteristics.get(key);
2960         assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value);
2961         return value;
2962     }
2963 
2964     /**
2965      * Get a crop region for a given zoom factor and center position.
2966      * <p>
2967      * The center position is a normalized position in the range of [0, 1.0],
2968      * where (0, 0) represents the top left corner and (1.0, 1.0) represents the
2969      * bottom right corner. The center position could limit the effective minimal
2970      * zoom factor; for example, if the center position is (0.75, 0.75), the
2971      * effective minimal zoom factor becomes 2.0. If the requested zoom factor
2972      * is smaller than 2.0, a crop region with 2.0 zoom factor will be returned.
2973      * </p>
2974      * <p>
2975      * The aspect ratio of the crop region is maintained the same as the aspect
2976      * ratio of active array.
2977      * </p>
2978      *
2979      * @param zoomFactor The zoom factor to generate the crop region, it must be
2980      *            >= 1.0
2981      * @param center The normalized zoom center point that is in the range of [0, 1].
2982      * @param maxZoom The max zoom factor supported by this device.
2983      * @param activeArray The active array size of this device.
2984      * @return crop region for the given normalized center and zoom factor.
2985      */
2986     public static Rect getCropRegionForZoom(float zoomFactor, final PointF center,
2987             final float maxZoom, final Rect activeArray) {
2988         if (zoomFactor < 1.0) {
2989             throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0");
2990         }
2991         if (center.x > 1.0 || center.x < 0) {
2992             throw new IllegalArgumentException("center.x " + center.x
2993                     + " should be in range of [0, 1.0]");
2994         }
2995         if (center.y > 1.0 || center.y < 0) {
2996             throw new IllegalArgumentException("center.y " + center.y
2997                     + " should be in range of [0, 1.0]");
2998         }
2999         if (maxZoom < 1.0) {
3000             throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0");
3001         }
3002         if (activeArray == null) {
3003             throw new IllegalArgumentException("activeArray must not be null");
3004         }
3005 
3006         float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x),
3007                 Math.min(center.y, 1.0f - center.y));
3008         float minEffectiveZoom =  0.5f / minCenterLength;
3009         if (minEffectiveZoom > maxZoom) {
3010             throw new IllegalArgumentException("Requested center " + center.toString() +
3011                     " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max"
3012                             + " zoom factor " + maxZoom);
3013         }
3014 
3015         if (zoomFactor < minEffectiveZoom) {
3016             Log.w(TAG, "Requested zoomFactor " + zoomFactor + " < minimal zoomable factor "
3017                     + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom);
3018             zoomFactor = minEffectiveZoom;
3019         }
3020 
3021         int cropCenterX = (int)(activeArray.width() * center.x);
3022         int cropCenterY = (int)(activeArray.height() * center.y);
3023         int cropWidth = (int) (activeArray.width() / zoomFactor);
3024         int cropHeight = (int) (activeArray.height() / zoomFactor);
3025 
3026         return new Rect(
3027                 /*left*/cropCenterX - cropWidth / 2,
3028                 /*top*/cropCenterY - cropHeight / 2,
3029                 /*right*/ cropCenterX + cropWidth / 2,
3030                 /*bottom*/cropCenterY + cropHeight / 2);
3031     }
3032 
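    /**
     * Illustrative usage sketch: compute a 2x crop region centered in the active array,
     * assuming the device reports a max zoom of at least 2x. The method name is a
     * hypothetical example.
     */
    private static Rect exampleCenter2xCropRegion(Rect activeArray, float maxZoom) {
        return getCropRegionForZoom(/*zoomFactor*/2.0f, new PointF(0.5f, 0.5f), maxZoom,
                activeArray);
    }
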
3033     /**
3034      * Get AeAvailableTargetFpsRanges and sort them in descending order by max fps
3035      *
3036      * @param staticInfo camera static metadata
3037      * @return AeAvailableTargetFpsRanges in descending order by max fps
3038      */
3039     public static Range<Integer>[] getDescendingTargetFpsRanges(StaticMetadata staticInfo) {
3040         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
3041         Arrays.sort(fpsRanges, new Comparator<Range<Integer>>() {
3042             public int compare(Range<Integer> r1, Range<Integer> r2) {
3043                 return r2.getUpper() - r1.getUpper();
3044             }
3045         });
3046         return fpsRanges;
3047     }
3048 
3049     /**
3050      * Get AeAvailableTargetFpsRanges with max fps not exceeding 30
3051      *
3052      * @param staticInfo camera static metadata
3053      * @return AeAvailableTargetFpsRanges with max fps not exceeding 30
3054      */
3055     public static List<Range<Integer>> getTargetFpsRangesUpTo30(StaticMetadata staticInfo) {
3056         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
3057         ArrayList<Range<Integer>> fpsRangesUpTo30 = new ArrayList<Range<Integer>>();
3058         for (Range<Integer> fpsRange : fpsRanges) {
3059             if (fpsRange.getUpper() <= 30) {
3060                 fpsRangesUpTo30.add(fpsRange);
3061             }
3062         }
3063         return fpsRangesUpTo30;
3064     }
3065 
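    /**
     * Illustrative usage sketch: apply the first AE target FPS range whose upper bound does
     * not exceed 30 to a caller-provided preview request builder, if such a range exists.
     * The method name is a hypothetical example.
     */
    private static void exampleUseUpTo30FpsRange(CaptureRequest.Builder previewBuilder,
            StaticMetadata staticInfo) {
        List<Range<Integer>> ranges = getTargetFpsRangesUpTo30(staticInfo);
        if (!ranges.isEmpty()) {
            previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, ranges.get(0));
        }
    }
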
3066     /**
3067      * Get AeAvailableTargetFpsRanges with max fps greater than 30
3068      *
3069      * @param staticInfo camera static metadata
3070      * @return AeAvailableTargetFpsRanges with max fps greater than 30
3071      */
3072     public static List<Range<Integer>> getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo) {
3073         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
3074         ArrayList<Range<Integer>> fpsRangesGreaterThan30 = new ArrayList<Range<Integer>>();
3075         for (Range<Integer> fpsRange : fpsRanges) {
3076             if (fpsRange.getUpper() > 30) {
3077                 fpsRangesGreaterThan30.add(fpsRange);
3078             }
3079         }
3080         return fpsRangesGreaterThan30;
3081     }
3082 
3083     /**
3084      * Calculate output 3A region from the intersection of input 3A region and cropped region.
3085      *
3086      * @param requestRegions The input 3A regions
3087      * @param cropRect The cropped region
3088      * @return expected 3A regions output in capture result
3089      */
3090     public static MeteringRectangle[] getExpectedOutputRegion(
3091             MeteringRectangle[] requestRegions, Rect cropRect){
3092         MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
3093         for (int i = 0; i < requestRegions.length; i++) {
3094             Rect requestRect = requestRegions[i].getRect();
3095             Rect resultRect = new Rect();
3096             boolean intersect = resultRect.setIntersect(requestRect, cropRect);
3097             resultRegions[i] = new MeteringRectangle(
3098                     resultRect,
3099                     intersect ? requestRegions[i].getMeteringWeight() : 0);
3100         }
3101         return resultRegions;
3102     }
3103 
3104     /**
3105      * Copy source image data to destination image.
3106      *
3107      * @param src The source image to be copied from.
3108      * @param dst The destination image to be copied to.
3109      * @throws IllegalArgumentException If the source and destination images have
3110      *             different formats or sizes, or one of the images is not copyable.
3111      */
3112     public static void imageCopy(Image src, Image dst) {
3113         if (src == null || dst == null) {
3114             throw new IllegalArgumentException("Images should be non-null");
3115         }
3116         if (src.getFormat() != dst.getFormat()) {
3117             throw new IllegalArgumentException("Src and dst images should have the same format");
3118         }
3119         if (src.getFormat() == ImageFormat.PRIVATE ||
3120                 dst.getFormat() == ImageFormat.PRIVATE) {
3121             throw new IllegalArgumentException("PRIVATE format images are not copyable");
3122         }
3123 
3124         Size srcSize = new Size(src.getWidth(), src.getHeight());
3125         Size dstSize = new Size(dst.getWidth(), dst.getHeight());
3126         if (!srcSize.equals(dstSize)) {
3127             throw new IllegalArgumentException("source image size " + srcSize + " is different"
3128                     + " with " + "destination image size " + dstSize);
3129         }
3130 
3131         // TODO: check the owner of the dst image, it must be from ImageWriter, other source may
3132         // not be writable. Maybe we should add an isWritable() method in image class.
3133 
3134         Plane[] srcPlanes = src.getPlanes();
3135         Plane[] dstPlanes = dst.getPlanes();
3136         ByteBuffer srcBuffer = null;
3137         ByteBuffer dstBuffer = null;
3138         for (int i = 0; i < srcPlanes.length; i++) {
3139             srcBuffer = srcPlanes[i].getBuffer();
3140             dstBuffer = dstPlanes[i].getBuffer();
3141             int srcPos = srcBuffer.position();
3142             srcBuffer.rewind();
3143             dstBuffer.rewind();
3144             int srcRowStride = srcPlanes[i].getRowStride();
3145             int dstRowStride = dstPlanes[i].getRowStride();
3146             int srcPixStride = srcPlanes[i].getPixelStride();
3147             int dstPixStride = dstPlanes[i].getPixelStride();
3148 
3149             if (srcPixStride > 2 || dstPixStride > 2) {
3150                 throw new IllegalArgumentException("source pixel stride " + srcPixStride +
3151                         " with destination pixel stride " + dstPixStride +
3152                         " is not supported");
3153             }
3154 
3155             if (srcRowStride == dstRowStride && srcPixStride == dstPixStride &&
3156                     srcPixStride == 1) {
3157                 // Fast path, just copy the content in the byteBuffer all together.
3158                 dstBuffer.put(srcBuffer);
3159             } else {
3160                 Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i);
3161                 int srcRowByteCount = srcRowStride;
3162                 int dstRowByteCount = dstRowStride;
3163                 byte[] srcDataRow = new byte[Math.max(srcRowStride, dstRowStride)];
3164 
3165                 if (srcPixStride == dstPixStride && srcPixStride == 1) {
3166                     // Row by row copy case
3167                     for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
3168                         if (row == effectivePlaneSize.getHeight() - 1) {
3169                             // Special case for interleaved planes: need handle the last row
3170                             // carefully to avoid memory corruption. Check if we have enough bytes
3171                             // to copy.
3172                             srcRowByteCount = Math.min(srcRowByteCount, srcBuffer.remaining());
3173                             dstRowByteCount = Math.min(dstRowByteCount, dstBuffer.remaining());
3174                         }
3175                         srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
3176                         dstBuffer.put(srcDataRow, /*offset*/0, dstRowByteCount);
3177                     }
3178                 } else {
3179                     // Row by row per pixel copy case
3180                     byte[] dstDataRow = new byte[dstRowByteCount];
3181                     for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
3182                         if (row == effectivePlaneSize.getHeight() - 1) {
3183                             // Special case for interleaved planes: need handle the last row
3184                             // carefully to avoid memory corruption. Check if we have enough bytes
3185                             // to copy.
3186                             int remainingBytes = srcBuffer.remaining();
3187                             if (srcRowByteCount > remainingBytes) {
3188                                 srcRowByteCount = remainingBytes;
3189                             }
3190                             remainingBytes = dstBuffer.remaining();
3191                             if (dstRowByteCount > remainingBytes) {
3192                                 dstRowByteCount = remainingBytes;
3193                             }
3194                         }
3195                         srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
3196                         int pos = dstBuffer.position();
3197                         dstBuffer.get(dstDataRow, /*offset*/0, dstRowByteCount);
3198                         dstBuffer.position(pos);
3199                         for (int x = 0; x < effectivePlaneSize.getWidth(); x++) {
3200                             dstDataRow[x * dstPixStride] = srcDataRow[x * srcPixStride];
3201                         }
3202                         dstBuffer.put(dstDataRow, /*offset*/0, dstRowByteCount);
3203                     }
3204                 }
3205             }
3206             srcBuffer.position(srcPos);
3207             dstBuffer.rewind();
3208         }
3209     }
3210 
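    /**
     * Illustrative usage sketch: copy the most recent image from a caller-provided
     * ImageReader into an input image dequeued from an ImageWriter, then queue it
     * downstream. Assumes both sides share the same non-PRIVATE format and size; the method
     * name is a hypothetical example.
     */
    private static void exampleCopyReaderImageToWriter(ImageReader reader, ImageWriter writer) {
        Image src = reader.acquireLatestImage();
        if (src == null) {
            return; // No image available yet.
        }
        try {
            Image dst = writer.dequeueInputImage();
            imageCopy(src, dst);
            writer.queueInputImage(dst);
        } finally {
            src.close();
        }
    }
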
3211     private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) {
3212         switch (image.getFormat()) {
3213             case ImageFormat.YUV_420_888:
3214                 if (planeIdx == 0) {
3215                     return new Size(image.getWidth(), image.getHeight());
3216                 } else {
3217                     return new Size(image.getWidth() / 2, image.getHeight() / 2);
3218                 }
3219             case ImageFormat.JPEG:
3220             case ImageFormat.RAW_SENSOR:
3221             case ImageFormat.RAW10:
3222             case ImageFormat.RAW12:
3223             case ImageFormat.DEPTH16:
3224                 return new Size(image.getWidth(), image.getHeight());
3225             case ImageFormat.PRIVATE:
3226                 return new Size(0, 0);
3227             default:
3228                 throw new UnsupportedOperationException(
3229                         String.format("Invalid image format %d", image.getFormat()));
3230         }
3231     }
3232 
3233     /**
3234      * <p>
3235      * Checks whether the two images are strongly equal.
3236      * </p>
3237      * <p>
3238      * Two images are strongly equal if and only if the data, formats, sizes,
3239      * and timestamps are the same. For {@link ImageFormat#PRIVATE PRIVATE} format
3240      * images, the image data is not accessible, so the data comparison is
3241      * effectively skipped as the number of planes is zero.
3242      * </p>
3243      * <p>
3244      * Note that this method compares the pixel data even outside of the crop
3245      * region, which may not be necessary for general use case.
3246      * </p>
3247      *
3248      * @param lhsImg First image to be compared with.
3249      * @param rhsImg Second image to be compared with.
3250      * @return true if the two images are equal, false otherwise.
3251      * @throws IllegalArgumentException If either of image is null.
3252      */
3253     public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) {
3254         if (lhsImg == null || rhsImg == null) {
3255             throw new IllegalArgumentException("Images should be non-null");
3256         }
3257 
3258         if (lhsImg.getFormat() != rhsImg.getFormat()) {
3259             Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different with rhsImg format "
3260                     + rhsImg.getFormat());
3261             return false;
3262         }
3263 
3264         if (lhsImg.getWidth() != rhsImg.getWidth()) {
3265             Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different with rhsImg width "
3266                     + rhsImg.getWidth());
3267             return false;
3268         }
3269 
3270         if (lhsImg.getHeight() != rhsImg.getHeight()) {
3271             Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different with rhsImg height "
3272                     + rhsImg.getHeight());
3273             return false;
3274         }
3275 
3276         if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) {
3277             Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp()
3278                     + " is different with rhsImg timestamp " + rhsImg.getTimestamp());
3279             return false;
3280         }
3281 
3282         if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) {
3283             Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect()
3284                     + " is different with rhsImg crop rect " + rhsImg.getCropRect());
3285             return false;
3286         }
3287 
3288         // Compare data inside of the image.
3289         Plane[] lhsPlanes = lhsImg.getPlanes();
3290         Plane[] rhsPlanes = rhsImg.getPlanes();
3291         ByteBuffer lhsBuffer = null;
3292         ByteBuffer rhsBuffer = null;
3293         for (int i = 0; i < lhsPlanes.length; i++) {
3294             lhsBuffer = lhsPlanes[i].getBuffer();
3295             rhsBuffer = rhsPlanes[i].getBuffer();
3296             lhsBuffer.rewind();
3297             rhsBuffer.rewind();
3298             // Special case for YUV420_888 buffer with different layout or
3299             // potentially differently interleaved U/V planes.
3300             if (lhsImg.getFormat() == ImageFormat.YUV_420_888 &&
3301                     (lhsPlanes[i].getPixelStride() != rhsPlanes[i].getPixelStride() ||
3302                      lhsPlanes[i].getRowStride() != rhsPlanes[i].getRowStride() ||
3303                      (lhsPlanes[i].getPixelStride() != 1))) {
3304                 int width = getEffectivePlaneSizeForImage(lhsImg, i).getWidth();
3305                 int height = getEffectivePlaneSizeForImage(lhsImg, i).getHeight();
3306                 int rowSizeL = lhsPlanes[i].getRowStride();
3307                 int rowSizeR = rhsPlanes[i].getRowStride();
3308                 byte[] lhsRow = new byte[rowSizeL];
3309                 byte[] rhsRow = new byte[rowSizeR];
3310                 int pixStrideL = lhsPlanes[i].getPixelStride();
3311                 int pixStrideR = rhsPlanes[i].getPixelStride();
3312                 for (int r = 0; r < height; r++) {
3313                     if (r == height - 1) {
3314                         rowSizeL = lhsBuffer.remaining();
3315                         rowSizeR = rhsBuffer.remaining();
3316                     }
3317                     lhsBuffer.get(lhsRow, /*offset*/0, rowSizeL);
3318                     rhsBuffer.get(rhsRow, /*offset*/0, rowSizeR);
3319                     for (int c = 0; c < width; c++) {
3320                         if (lhsRow[c * pixStrideL] != rhsRow[c * pixStrideR]) {
3321                             Log.i(TAG, String.format(
3322                                     "byte buffers for plane %d row %d col %d don't match.",
3323                                     i, r, c));
3324                             return false;
3325                         }
3326                     }
3327                 }
3328             } else {
3329                 // Compare entire buffer directly
3330                 if (!lhsBuffer.equals(rhsBuffer)) {
3331                     Log.i(TAG, "byte buffers for plane " +  i + " don't match.");
3332                     return false;
3333                 }
3334             }
3335         }
3336 
3337         return true;
3338     }
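
    // Illustrative usage sketch for isImageStronglyEqual() above (not part of the test flow).
    // "inputImage" and "reprocessedImage" are hypothetical Images obtained elsewhere, e.g. from
    // ImageReaders in a reprocess test, where loss-less round-tripping of a buffer is expected.
    //
    //     if (!isImageStronglyEqual(inputImage, reprocessedImage)) {
    //         fail("Reprocessed image differs from the input image");
    //     }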
3339 
3340     /**
3341      * Set jpeg related keys in a capture request builder.
3342      *
3343      * @param builder The capture request builder to set the keys in.
3344      * @param exifData The exif data to set.
3345      * @param thumbnailSize The thumbnail size to set.
3346      * @param collector The camera error collector to collect errors.
3347      */
3348     public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData,
3349             Size thumbnailSize, CameraErrorCollector collector) {
3350         builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);
3351         builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation);
3352         builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation);
3353         builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality);
3354         builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
3355                 exifData.thumbnailQuality);
3356 
3357         // Validate request set and get.
3358         collector.expectEquals("JPEG thumbnail size request set and get should match",
3359                 thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
3360         collector.expectTrue("GPS locations request set and get should match.",
3361                 areGpsFieldsEqual(exifData.gpsLocation,
3362                 builder.get(CaptureRequest.JPEG_GPS_LOCATION)));
3363         collector.expectEquals("JPEG orientation request set and get should match",
3364                 exifData.jpegOrientation,
3365                 builder.get(CaptureRequest.JPEG_ORIENTATION));
3366         collector.expectEquals("JPEG quality request set and get should match",
3367                 exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY));
3368         collector.expectEquals("JPEG thumbnail quality request set and get should match",
3369                 exifData.thumbnailQuality,
3370                 builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY));
3371     }
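
    // Illustrative usage sketch for setJpegKeys() above. "camera", "jpegSurface", "someLocation"
    // and "mCollector" are hypothetical members of the calling test; the EXIF values are
    // arbitrary sample data.
    //
    //     CaptureRequest.Builder stillBuilder =
    //             camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    //     stillBuilder.addTarget(jpegSurface);
    //     ExifTestData exif = new ExifTestData(someLocation, /*orientation*/ 90,
    //             /*jpgQuality*/ (byte) 85, /*thumbQuality*/ (byte) 85);
    //     setJpegKeys(stillBuilder, exif, new Size(320, 240), mCollector);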
3372 
3373     /**
3374      * Simple validation of JPEG image size and format.
3375      * <p>
3376      * Only validates the image object's basic correctness. It is fast, but doesn't actually
3377      * check the buffer data. Assert is used here as it makes no sense to
3378      * continue the test if the captured jpeg image has serious failures.
3379      * </p>
3380      *
3381      * @param image The captured JPEG/HEIC image
3382      * @param expectedSize Expected capture JPEG/HEIC size
3383      * @param format JPEG/HEIC image format
3384      */
3385     public static void basicValidateBlobImage(Image image, Size expectedSize, int format) {
3386         Size imageSz = new Size(image.getWidth(), image.getHeight());
3387         assertTrue(
3388                 String.format("Image size doesn't match (expected %s, actual %s) ",
3389                         expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz));
3390         assertEquals("Image format should be " + ((format == ImageFormat.HEIC) ? "HEIC" : "JPEG"),
3391                 format, image.getFormat());
3392         assertNotNull("Image plane shouldn't be null", image.getPlanes());
3393         assertEquals("Image plane number should be 1", 1, image.getPlanes().length);
3394 
3395         // Jpeg/Heic decoding validation was done in ImageReaderTest,
3396         // no need to duplicate the test here.
3397     }
3398 
3399     /**
3400      * Verify that the EXIF and JPEG related keys in a capture result are as expected:
3401      * - Capture request get values are the same as were set.
3402      * - The capture result's exif data is the same as was set by
3403      *   the capture request.
3404      * - New tags in the result set by the camera service are
3405      *   present and semantically correct.
3406      *
3407      * @param image The output JPEG/HEIC image to verify.
3408      * @param captureResult The capture result to verify.
3409      * @param expectedSize The expected JPEG/HEIC size.
3410      * @param expectedThumbnailSize The expected thumbnail size.
3411      * @param expectedExifData The expected EXIF data
3412      * @param staticInfo The static metadata for the camera device.
3413      * @param allStaticInfo The camera Id to static metadata map for all cameras.
3414      * @param debugFileNameBase The directory under which to dump the jpeg/heic for validation.
3415      * @param collector The camera error collector to collect errors.
3416      * @param format JPEG/HEIC format
3417      */
3418     public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize,
3419             Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo,
3420             HashMap<String, StaticMetadata> allStaticInfo, CameraErrorCollector collector,
3421             String debugFileNameBase, int format) throws Exception {
3422 
3423         basicValidateBlobImage(image, expectedSize, format);
3424 
3425         byte[] blobBuffer = getDataFromImage(image);
3426         // Have to dump into a file to be able to use ExifInterface
3427         String filePostfix = (format == ImageFormat.HEIC ? ".heic" : ".jpeg");
3428         String blobFilename = debugFileNameBase + "/verifyJpegKeys" + filePostfix;
3429         dumpFile(blobFilename, blobBuffer);
3430         ExifInterface exif = new ExifInterface(blobFilename);
3431 
3432         if (expectedThumbnailSize.equals(new Size(0,0))) {
3433             collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)",
3434                     !exif.hasThumbnail());
3435         } else {
3436             collector.expectTrue("Jpeg must have thumbnail for thumbnail size " +
3437                     expectedThumbnailSize, exif.hasThumbnail());
3438         }
3439 
3440         // Validate capture result vs. request
3441         Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE);
3442         int orientationTested = expectedExifData.jpegOrientation;
3443         // The legacy shim never rotates the thumbnail size
3444         if ((orientationTested == 90 || orientationTested == 270) &&
3445                 staticInfo.isHardwareLevelAtLeastLimited()) {
3446             int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3447                     /*defaultValue*/-1);
3448             if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
3449                 // Device physically rotated image+thumbnail data
3450                 // Expect thumbnail size to be also rotated
3451                 resultThumbnailSize = new Size(resultThumbnailSize.getHeight(),
3452                         resultThumbnailSize.getWidth());
3453             }
3454         }
3455 
3456         collector.expectEquals("JPEG thumbnail size result and request should match",
3457                 expectedThumbnailSize, resultThumbnailSize);
3458         if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) !=
3459                 null) {
3460             collector.expectTrue("GPS location result and request should match.",
3461                     areGpsFieldsEqual(expectedExifData.gpsLocation,
3462                     captureResult.get(CaptureResult.JPEG_GPS_LOCATION)));
3463         }
3464         collector.expectEquals("JPEG orientation result and request should match",
3465                 expectedExifData.jpegOrientation,
3466                 captureResult.get(CaptureResult.JPEG_ORIENTATION));
3467         collector.expectEquals("JPEG quality result and request should match",
3468                 expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY));
3469         collector.expectEquals("JPEG thumbnail quality result and request should match",
3470                 expectedExifData.thumbnailQuality,
3471                 captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY));
3472 
3473         // Validate other exif tags for all non-legacy devices
3474         if (!staticInfo.isHardwareLevelLegacy()) {
3475             verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, allStaticInfo,
3476                     collector, expectedExifData);
3477         }
3478     }
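
    // Illustrative sketch of a typical call site for verifyJpegKeys() above: capture a still
    // JPEG, pair the Image with its capture result, then verify. The listener methods, timeout
    // constants, sizes, exif data and "m"-prefixed members are assumed to exist in the calling
    // test; they are not utilities defined in this file.
    //
    //     Image jpeg = jpegListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     CaptureResult result = resultListener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
    //     verifyJpegKeys(jpeg, result, jpegSize, thumbnailSize, exifTestData, mStaticInfo,
    //             mAllStaticInfo, mCollector, mDebugFileNameBase, ImageFormat.JPEG);
    //     jpeg.close();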
3479 
3480     public static Optional<Long> getSurfaceUsage(Surface s) {
3481         if (s == null || !s.isValid()) {
3482             Log.e(TAG, "Invalid Surface!");
3483             return Optional.empty();
3484         }
3485 
3486         long usage = 0;
3487         ImageWriter writer = ImageWriter.newInstance(s, /*maxImages*/1, ImageFormat.YUV_420_888);
3488         try {
3489             Image img = writer.dequeueInputImage();
3490             if (img != null) {
3491                 usage = img.getHardwareBuffer().getUsage();
3492                 img.close();
3493             } else {
3494                 Log.e(TAG, "Unable to dequeue ImageWriter buffer!");
3495                 return Optional.empty();
3496             }
3497         } finally {
3498             writer.close();
3499         }
3500 
3501         return Optional.of(usage);
3502     }
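
    // Illustrative sketch for getSurfaceUsage() above: check whether a (hypothetical)
    // "previewSurface" was allocated with GPU-sampleable usage. HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
    // is a real framework constant; the surrounding names are assumptions of the caller.
    //
    //     Optional<Long> usage = getSurfaceUsage(previewSurface);
    //     if (usage.isPresent()
    //             && (usage.get() & HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE) != 0) {
    //         // The surface buffers can be sampled by the GPU.
    //     }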
3503 
3504     /**
3505      * Get the degree of an EXIF orientation.
3506      */
3507     private static int getExifOrientationInDegree(int exifOrientation,
3508             CameraErrorCollector collector) {
3509         switch (exifOrientation) {
3510             case ExifInterface.ORIENTATION_NORMAL:
3511                 return 0;
3512             case ExifInterface.ORIENTATION_ROTATE_90:
3513                 return 90;
3514             case ExifInterface.ORIENTATION_ROTATE_180:
3515                 return 180;
3516             case ExifInterface.ORIENTATION_ROTATE_270:
3517                 return 270;
3518             default:
3519                 collector.addMessage("It is impossible to get non 0, 90, 180, 270 degress exif" +
3520                         "info based on the request orientation range");
3521                 return 0;
3522         }
3523     }
3524 
3525     /**
3526      * Get all of the supported focal lengths for capture result.
3527      *
3528      * If the camera is a logical camera, return the focal lengths of the logical camera
3529      * and its active physical camera.
3530      *
3531      * If the camera isn't a logical camera, return the focal lengths supported by the
3532      * single camera.
3533      */
3534     public static Set<Float> getAvailableFocalLengthsForResult(CaptureResult result,
3535             StaticMetadata staticInfo,
3536             HashMap<String, StaticMetadata> allStaticInfo) {
3537         Set<Float> focalLengths = new HashSet<Float>();
3538         float[] supportedFocalLengths = staticInfo.getAvailableFocalLengthsChecked();
3539         for (float focalLength : supportedFocalLengths) {
3540             focalLengths.add(focalLength);
3541         }
3542 
3543         if (staticInfo.isLogicalMultiCamera()) {
3544             boolean activePhysicalCameraIdSupported =
3545                     staticInfo.isActivePhysicalCameraIdSupported();
3546             Set<String> physicalCameraIds;
3547             if (activePhysicalCameraIdSupported) {
3548                 String activePhysicalCameraId = result.get(
3549                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3550                 physicalCameraIds = new HashSet<String>();
3551                 physicalCameraIds.add(activePhysicalCameraId);
3552             } else {
3553                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3554             }
3555 
3556             for (String physicalCameraId : physicalCameraIds) {
3557                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3558                 if (physicalStaticInfo != null) {
3559                     float[] focalLengthsArray =
3560                             physicalStaticInfo.getAvailableFocalLengthsChecked();
3561                     for (float focalLength: focalLengthsArray) {
3562                         focalLengths.add(focalLength);
3563                     }
3564                 }
3565             }
3566         }
3567 
3568         return focalLengths;
3569     }
3570 
3571     /**
3572      * Validate and return the focal length.
3573      *
3574      * @param result Capture result to get the focal length
3575      * @param supportedFocalLengths Valid focal lengths to check the result focal length against
3576      * @param collector The camera error collector
3577      * @return Focal length from capture result or -1 if focal length is not available.
3578      */
3579     private static float validateFocalLength(CaptureResult result,
3580             Set<Float> supportedFocalLengths, CameraErrorCollector collector) {
3581         Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
3582         if (collector.expectTrue("Focal length is invalid",
3583                 resultFocalLength != null && resultFocalLength > 0)) {
3584             collector.expectTrue("Focal length should be one of the available focal length",
3585                     supportedFocalLengths.contains(resultFocalLength));
3586             return resultFocalLength;
3587         }
3588         return -1;
3589     }
3590 
3591     /**
3592      * Get all of the supported apertures for capture result.
3593      *
3594      * If the camera is a logical camera, return the apertures of the logical camera
3595      * and its active physical camera.
3596      *
3597      * If the camera isn't a logical camera, return the apertures supported by the
3598      * single camera.
3599      */
3600     private static Set<Float> getAvailableAperturesForResult(CaptureResult result,
3601             StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo) {
3602         Set<Float> allApertures = new HashSet<Float>();
3603         float[] supportedApertures = staticInfo.getAvailableAperturesChecked();
3604         for (float aperture : supportedApertures) {
3605             allApertures.add(aperture);
3606         }
3607 
3608         if (staticInfo.isLogicalMultiCamera()) {
3609             boolean activePhysicalCameraIdSupported =
3610                     staticInfo.isActivePhysicalCameraIdSupported();
3611             Set<String> physicalCameraIds;
3612             if (activePhysicalCameraIdSupported) {
3613                 String activePhysicalCameraId = result.get(
3614                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3615                 physicalCameraIds = new HashSet<String>();
3616                 physicalCameraIds.add(activePhysicalCameraId);
3617             } else {
3618                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3619             }
3620 
3621             for (String physicalCameraId : physicalCameraIds) {
3622                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3623                 if (physicalStaticInfo != null) {
3624                     float[] apertures = physicalStaticInfo.getAvailableAperturesChecked();
3625                     for (float aperture: apertures) {
3626                         allApertures.add(aperture);
3627                     }
3628                 }
3629             }
3630         }
3631 
3632         return allApertures;
3633     }
3634 
3635     /**
3636      * Validate and return the aperture.
3637      *
3638      * @param result Capture result to get the aperture
3639      * @return Aperture from capture result or -1 if aperture is not available.
3640      */
3641     private static float validateAperture(CaptureResult result,
3642             Set<Float> supportedApertures, CameraErrorCollector collector) {
3643         Float resultAperture = result.get(CaptureResult.LENS_APERTURE);
3644         if (collector.expectTrue("Capture result aperture is invalid",
3645                 resultAperture != null && resultAperture > 0)) {
3646             collector.expectTrue("Aperture should be one of the available apertures",
3647                     supportedApertures.contains(resultAperture));
3648             return resultAperture;
3649         }
3650         return -1;
3651     }
3652 
3653     /**
3654      * Return the closest value in a Set of floats.
3655      */
3656     private static float getClosestValueInSet(Set<Float> values, float target) {
3657         float minDistance = Float.MAX_VALUE;
3658         float closestValue = -1.0f;
3659         for(float value : values) {
3660             float distance = Math.abs(value - target);
3661             if (minDistance > distance) {
3662                 minDistance = distance;
3663                 closestValue = value;
3664             }
3665         }
3666 
3667         return closestValue;
3668     }
3669 
3670     /**
3671      * Return if two Location's GPS field are the same.
3672      */
3673     private static boolean areGpsFieldsEqual(Location a, Location b) {
3674         if (a == null || b == null) {
3675             return false;
3676         }
3677 
3678         return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
3679                 a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
3680                 java.util.Objects.equals(a.getProvider(), b.getProvider());
3681     }
3682 
3683     /**
3684      * Verify extra tags in JPEG EXIF
3685      */
3686     private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize,
3687             CaptureResult result, StaticMetadata staticInfo,
3688             HashMap<String, StaticMetadata> allStaticInfo,
3689             CameraErrorCollector collector, ExifTestData expectedExifData)
3690             throws ParseException {
3691         /**
3692          * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION.
3693          * Orientation and exif width/height need to be tested carefully in two cases:
3694          *
3695          * 1. The device rotates the image buffer physically; then the exif width/height may
3696          * not match the requested still capture size, and we need to swap them to check.
3697          *
3698          * 2. The device uses the exif tag to record the image orientation and doesn't rotate
3699          * the jpeg image buffer itself. In this case, the exif width/height should always match
3700          * the requested still capture size, and the exif orientation should always match the
3701          * requested orientation.
3702          *
3703          */
3704         int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0);
3705         int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0);
3706         Size exifSize = new Size(exifWidth, exifHeight);
3707         // Orientation could be missing, which is ok; default to -1 if absent.
3708         int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3709                 /*defaultValue*/-1);
3710         // Get requested orientation from result, because they should be same.
3711         if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) {
3712             int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION);
3713             final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED;
3714             final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270;
3715             boolean orientationValid = collector.expectTrue(String.format(
3716                     "Exif orientation must be in range of [%d, %d]",
3717                     ORIENTATION_MIN, ORIENTATION_MAX),
3718                     exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX);
3719             if (orientationValid) {
3720                 /**
3721                  * The captured image doesn't respect the requested orientation,
3722                  * which means the device rotated the image buffer physically. Then we
3723                  * should swap the exif width/height accordingly to compare.
3724                  */
3725                 boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED;
3726 
3727                 if (deviceRotatedImage) {
3728                     // Case 1.
3729                     boolean needSwap = (requestedOrientation % 180 == 90);
3730                     if (needSwap) {
3731                         exifSize = new Size(exifHeight, exifWidth);
3732                     }
3733                 } else {
3734                     // Case 2.
3735                     collector.expectEquals("Exif orientaiton should match requested orientation",
3736                             requestedOrientation, getExifOrientationInDegree(exifOrientation,
3737                             collector));
3738                 }
3739             }
3740         }
3741 
3742         /**
3743          * Ideally, we'd need to check exifSize == jpegSize == actual buffer size. But
3744          * jpegSize == jpeg decode bounds size (from the jpeg frame
3745          * header, not exif) was validated in ImageReaderTest, so there is no need to
3746          * validate it again here.
3747          */
3748         collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize);
3749 
3750         // TAG_DATETIME, it should be local time
3751         long currentTimeInMs = System.currentTimeMillis();
3752         long currentTimeInSecond = currentTimeInMs / 1000;
3753         Date date = new Date(currentTimeInMs);
3754         String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date);
3755         String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3756         if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) {
3757             collector.expectTrue("Exif TAG_DATETIME is wrong",
3758                     dateTime.length() == EXIF_DATETIME_LENGTH);
3759             long exifTimeInSecond =
3760                     new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000;
3761             long delta = currentTimeInSecond - exifTimeInSecond;
3762             collector.expectTrue("Capture time deviates too much from the current time",
3763                     Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC);
3764             // It should be local time.
3765             collector.expectTrue("Exif date time should be local time",
3766                     dateTime.startsWith(localDatetime));
3767         }
3768 
3769         boolean isExternalCamera = staticInfo.isExternalCamera();
3770         if (!isExternalCamera) {
3771             // TAG_FOCAL_LENGTH.
3772             Set<Float> focalLengths = getAvailableFocalLengthsForResult(
3773                     result, staticInfo, allStaticInfo);
3774             float exifFocalLength = (float)exif.getAttributeDouble(
3775                         ExifInterface.TAG_FOCAL_LENGTH, -1);
3776             collector.expectEquals("Focal length should match",
3777                     getClosestValueInSet(focalLengths, exifFocalLength),
3778                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3779             // More checks for focal length.
3780             collector.expectEquals("Exif focal length should match capture result",
3781                     validateFocalLength(result, focalLengths, collector),
3782                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3783 
3784             // TAG_EXPOSURE_TIME
3785             // ExifInterface API gives exposure time value in the form of float instead of rational
3786             String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME);
3787             collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime);
3788             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) {
3789                 if (exposureTime != null) {
3790                     double exposureTimeValue = Double.parseDouble(exposureTime);
3791                     long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
3792                     double expected = expTimeResult / 1e9;
3793                     double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO;
3794                     tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC);
3795                     collector.expectEquals("Exif exposure time doesn't match", expected,
3796                             exposureTimeValue, tolerance);
3797                 }
3798             }
3799 
3800             // TAG_APERTURE
3801             // ExifInterface API gives aperture value in the form of float instead of rational
3802             String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE);
3803             collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture);
3804             if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) {
3805                 Set<Float> apertures = getAvailableAperturesForResult(
3806                         result, staticInfo, allStaticInfo);
3807                 if (exifAperture != null) {
3808                     float apertureValue = Float.parseFloat(exifAperture);
3809                     collector.expectEquals("Aperture value should match",
3810                             getClosestValueInSet(apertures, apertureValue),
3811                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3812                     // More checks for aperture.
3813                     collector.expectEquals("Exif aperture length should match capture result",
3814                             validateAperture(result, apertures, collector),
3815                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3816                 }
3817             }
3818 
3819             // TAG_MAKE
3820             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3821             collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make);
3822 
3823             // TAG_MODEL
3824             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3825             collector.expectTrue("Exif TAG_MODEL is incorrect",
3826                     model.startsWith(Build.MODEL) || model.endsWith(Build.MODEL));
3827 
3828 
3829             // TAG_ISO
3830             int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1);
3831             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY) ||
3832                     staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
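                // Clarifying note: after the multiplications below, expectedIso is kept on an
                // ISO x 100 scale (the post-RAW sensitivity boost is expressed in units of 1/100),
                // which is why the range check divides by 100 before comparing with the EXIF ISO.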
3833                 int expectedIso = 100;
3834                 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) {
3835                     expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
3836                 }
3837                 if (staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3838                     expectedIso = expectedIso *
3839                             result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
3840                 } else {
3841                     expectedIso *= 100;
3842                 }
3843                 collector.expectInRange("Exif TAG_ISO is incorrect", iso,
3844                         expectedIso/100,((expectedIso + 50)/100) + MAX_ISO_MISMATCH);
3845             }
3846         } else {
3847             // External camera specific checks
3848             // TAG_MAKE
3849             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3850             collector.expectNotNull("Exif TAG_MAKE is null", make);
3851 
3852             // TAG_MODEL
3853             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3854             collector.expectNotNull("Exif TAG_MODEL is nuill", model);
3855         }
3856 
3857 
3858         /**
3859          * TAG_FLASH. TODO: For full devices, can check a lot more info
3860          * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash)
3861          */
3862         String flash = exif.getAttribute(ExifInterface.TAG_FLASH);
3863         collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash);
3864 
3865         /**
3866          * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we
3867          * should be able to cross-check android.sensor.referenceIlluminant.
3868          */
3869         String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE);
3870         collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance);
3871 
3872         // TAG_DATETIME_DIGITIZED (a.k.a. create time for digital cameras).
3873         String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED);
3874         collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime);
3875         if (digitizedTime != null) {
3876             String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3877             collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime);
3878             if (expectedDateTime != null) {
3879                 collector.expectEquals("dataTime should match digitizedTime",
3880                         expectedDateTime, digitizedTime);
3881             }
3882         }
3883 
3884         /**
3885          * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at
3886          * most 9 digits in ExifInterface implementation, use getAttributeInt to
3887          * sanitize it. When the default value -1 is returned, it means that
3888          * this exif tag either doesn't exist or is a non-numerical invalid
3889          * string. Same rule applies to the rest of sub second tags.
3890          */
3891         int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1);
3892         collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime >= 0);
3893 
3894         // TAG_SUBSEC_TIME_ORIG
3895         int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG,
3896                 /*defaultValue*/-1);
3897         collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!",
3898                 subSecTimeOrig >= 0);
3899 
3900         // TAG_SUBSEC_TIME_DIG
3901         int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG,
3902                 /*defaultValue*/-1);
3903         collector.expectTrue(
3904                 "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig >= 0);
3905 
3906         /**
3907          * TAG_GPS_DATESTAMP & TAG_GPS_TIMESTAMP.
3908          * The GPS timestamp information should be in seconds UTC time.
3909          */
3910         String gpsDatestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP);
3911         collector.expectNotNull("Exif TAG_GPS_DATESTAMP shouldn't be null", gpsDatestamp);
3912         String gpsTimestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP);
3913         collector.expectNotNull("Exif TAG_GPS_TIMESTAMP shouldn't be null", gpsTimestamp);
3914 
3915         SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd hh:mm:ss z");
3916         String gpsExifTimeString = gpsDatestamp + " " + gpsTimestamp + " UTC";
3917         Date gpsDateTime = dateFormat.parse(gpsExifTimeString);
3918         Date expected = new Date(expectedExifData.gpsLocation.getTime());
3919         collector.expectEquals("Jpeg EXIF GPS time should match", expected, gpsDateTime);
3920     }
3921 
3922 
3923     /**
3924      * Immutable class wrapping the exif test data.
3925      */
3926     public static class ExifTestData {
3927         public final Location gpsLocation;
3928         public final int jpegOrientation;
3929         public final byte jpegQuality;
3930         public final byte thumbnailQuality;
3931 
3932         public ExifTestData(Location location, int orientation,
3933                 byte jpgQuality, byte thumbQuality) {
3934             gpsLocation = location;
3935             jpegOrientation = orientation;
3936             jpegQuality = jpgQuality;
3937             thumbnailQuality = thumbQuality;
3938         }
3939     }
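
    // Illustrative sketch: constructing the ExifTestData wrapper above with sample values. The
    // coordinates, time and quality values are arbitrary test data, not values required by the
    // camera framework.
    //
    //     Location gps = new Location(LocationManager.GPS_PROVIDER);
    //     gps.setLatitude(37.736071);
    //     gps.setLongitude(-122.441983);
    //     gps.setAltitude(21.0);
    //     gps.setTime(1199145600000L);
    //     ExifTestData exifData = new ExifTestData(gps, /*orientation*/ 90,
    //             /*jpgQuality*/ (byte) 80, /*thumbQuality*/ (byte) 75);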
3940 
3941     public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) {
3942         WindowMetrics windowMetrics = windowManager.getCurrentWindowMetrics();
3943         Rect windowBounds = windowMetrics.getBounds();
3944 
3945         int windowHeight = windowBounds.height();
3946         int windowWidth = windowBounds.width();
3947 
3948         if (windowHeight > windowWidth) {
3949             windowHeight = windowWidth;
3950             windowWidth = windowBounds.height();
3951         }
3952 
3953         if (bound.getWidth() <= windowWidth
3954                 && bound.getHeight() <= windowHeight) {
3955             return bound;
3956         } else {
3957             return new Size(windowWidth, windowHeight);
3958         }
3959     }
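
    // Illustrative sketch for getPreviewSizeBound() above: cap a preview size bound at 1080p and
    // at the current window size. "mWindowManager" is a hypothetical WindowManager obtained from
    // the test context.
    //
    //     Size bound = getPreviewSizeBound(mWindowManager, new Size(1920, 1080));
    //     // "bound" is now at most 1920x1080 and no larger than the (landscape-normalized)
    //     // window bounds in either dimension.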
3960 
3961     /**
3962      * Check if a particular stream configuration is supported by configuring it
3963      * to the device.
3964      */
3965     public static boolean isStreamConfigurationSupported(CameraDevice camera,
3966             List<Surface> outputSurfaces,
3967             CameraCaptureSession.StateCallback listener, Handler handler) {
3968         try {
3969             configureCameraSession(camera, outputSurfaces, listener, handler);
3970             return true;
3971         } catch (Exception e) {
3972             Log.i(TAG, "This stream configuration is not supported due to " + e.getMessage());
3973             return false;
3974         }
3975     }
3976 
3977     public final static class SessionConfigSupport {
3978         public final boolean error;
3979         public final boolean callSupported;
3980         public final boolean configSupported;
3981 
3982         public SessionConfigSupport(boolean error,
3983                 boolean callSupported, boolean configSupported) {
3984             this.error = error;
3985             this.callSupported = callSupported;
3986             this.configSupported = configSupported;
3987         }
3988     }
3989 
3990     /**
3991      * Query whether a particular stream combination is supported.
3992      */
3993     public static void checkSessionConfigurationWithSurfaces(CameraDevice camera,
3994             Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig,
3995             int operatingMode, CameraManager manager, boolean defaultSupport, String msg)
3996             throws Exception {
3997         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
3998         for (Surface surface : outputSurfaces) {
3999             outConfigurations.add(new OutputConfiguration(surface));
4000         }
4001 
4002         checkSessionConfigurationSupported(camera, handler, outConfigurations,
4003                 inputConfig, operatingMode, manager, defaultSupport, msg);
4004     }
4005 
4006     public static void checkSessionConfigurationSupported(CameraDevice camera,
4007             Handler handler, List<OutputConfiguration> outputConfigs,
4008             InputConfiguration inputConfig, int operatingMode, CameraManager manager,
4009             boolean defaultSupport, String msg) throws Exception {
4010         SessionConfigSupport sessionConfigSupported =
4011                 isSessionConfigSupported(camera, handler, outputConfigs, inputConfig,
4012                 operatingMode, manager, defaultSupport);
4013 
4014         assertTrue(msg, !sessionConfigSupported.error && sessionConfigSupported.configSupported);
4015     }
4016 
4017     /**
4018      * Query whether a particular stream combination is supported.
4019      */
4020     public static SessionConfigSupport isSessionConfigSupported(CameraDevice camera,
4021             Handler handler, List<OutputConfiguration> outputConfigs,
4022             InputConfiguration inputConfig, int operatingMode,
4023             CameraManager manager, boolean defaultSupport)
4024             throws android.hardware.camera2.CameraAccessException {
4025         boolean ret;
4026         BlockingSessionCallback sessionListener = new BlockingSessionCallback();
4027 
4028         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
4029                 new HandlerExecutor(handler), sessionListener);
4030         if (inputConfig != null) {
4031             sessionConfig.setInputConfiguration(inputConfig);
4032         }
4033 
4034         // Verify that the return value of CameraDevice.isSessionConfigurationSupported is the
4035         // same as CameraDeviceSetup.isSessionConfigurationSupported.
4036         // Note: This check only makes sense if targetSdkVersion and platform's SDK Version >= V
4037         boolean deviceSetupSupported = false;
4038         boolean configSupportedByDeviceSetup = false;
4039         String cameraId = camera.getId();
4040         if (Build.VERSION.SDK_INT > Build.VERSION_CODES.UPSIDE_DOWN_CAKE
4041                 && Flags.cameraDeviceSetup() && manager.isCameraDeviceSetupSupported(cameraId)) {
4042             CameraDeviceSetup deviceSetup = manager.getCameraDeviceSetup(cameraId);
4043             assertNotNull("Failed to get camera device setup for " + cameraId, deviceSetup);
4044             deviceSetupSupported = true;
4045 
4046             configSupportedByDeviceSetup = deviceSetup.isSessionConfigurationSupported(
4047                     sessionConfig);
4048         }
4049 
4050         try {
4051             ret = camera.isSessionConfigurationSupported(sessionConfig);
4052         } catch (UnsupportedOperationException e) {
4053             // Camera doesn't support session configuration query
4054             assertFalse("If device setup is supported, "
4055                     + "CameraDevice.isSessionConfigurationSupported cannot throw"
4056                     + "unsupportedOperationException", deviceSetupSupported);
4057             return new SessionConfigSupport(false/*error*/,
4058                     false/*callSupported*/, defaultSupport/*configSupported*/);
4059         } catch (IllegalArgumentException e) {
4060             return new SessionConfigSupport(true/*error*/,
4061                     false/*callSupported*/, false/*configSupported*/);
4062         } catch (android.hardware.camera2.CameraAccessException e) {
4063             return new SessionConfigSupport(true/*error*/,
4064                     false/*callSupported*/, false/*configSupported*/);
4065         }
4066 
4067         if (deviceSetupSupported) {
4068             assertEquals("CameraDeviceSetup and CameraDevice must return the same value "
4069                     + "for isSessionConfigurationSupported!", ret, configSupportedByDeviceSetup);
4070         }
4071         return new SessionConfigSupport(false/*error*/,
4072                 true/*callSupported*/, ret/*configSupported*/);
4073     }
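
    // Illustrative sketch for isSessionConfigSupported() above: probe a two-stream combination
    // before attempting to configure it. "mCamera", "mHandler", "mCameraManager", "previewSurface"
    // and "jpegSurface" are hypothetical members of the calling test.
    //
    //     List<OutputConfiguration> configs = new ArrayList<>();
    //     configs.add(new OutputConfiguration(previewSurface));
    //     configs.add(new OutputConfiguration(jpegSurface));
    //     SessionConfigSupport support = isSessionConfigSupported(mCamera, mHandler, configs,
    //             /*inputConfig*/ null, SessionConfiguration.SESSION_REGULAR, mCameraManager,
    //             /*defaultSupport*/ true);
    //     if (!support.error && support.configSupported) {
    //         // Safe to go ahead and create the session with these outputs.
    //     }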
4074 
4075     /**
4076      * Check if a session configuration with parameters is supported.
4077      *
4078      * All OutputConfigurations contain valid output surfaces.
4079      */
4080     public static boolean isSessionConfigWithParamsSupported(
4081             CameraDevice.CameraDeviceSetup cameraDeviceSetup,
4082             Handler handler, List<OutputConfiguration> outputConfigs,
4083             int operatingMode, CaptureRequest request) throws CameraAccessException {
4084         BlockingSessionCallback sessionListener = new BlockingSessionCallback();
4085         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
4086                 new HandlerExecutor(handler), sessionListener);
4087         sessionConfig.setSessionParameters(request);
4088 
4089         return cameraDeviceSetup.isSessionConfigurationSupported(sessionConfig);
4090     }
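
    // Illustrative sketch for isSessionConfigWithParamsSupported() above, assuming the caller
    // already has a CameraDeviceSetup ("deviceSetup"), a handler, output configurations and a
    // session-parameter CaptureRequest ("sessionParams") built for the same camera.
    //
    //     boolean supported = isSessionConfigWithParamsSupported(deviceSetup, mHandler,
    //             outputConfigs, SessionConfiguration.SESSION_REGULAR, sessionParams);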
4091 
4092     /**
4093      * Check if a session configuration with parameters is supported.
4094      *
4095      * <p>OutputConfigurations do not contain the output surface. Additionally, this function
4096      * checks the consistency of the isSessionConfigurationSupported return value between the
4097      * incomplete SessionConfiguration and the completed SessionConfiguration after addSurface
4098      * is called.</p>
4099      */
4100     public static boolean isSessionConfigWithParamsSupportedChecked(
4101             CameraDevice.CameraDeviceSetup cameraDeviceSetup,
4102             List<Pair<OutputConfiguration, Surface>> outputConfigs2Steps,
4103             int operatingMode, CaptureRequest request) throws CameraAccessException {
4104         List<OutputConfiguration> outputConfigs = new ArrayList<>();
4105         for (Pair<OutputConfiguration, Surface> c : outputConfigs2Steps) {
4106             outputConfigs.add(c.first);
4107         }
4108         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs);
4109         sessionConfig.setSessionParameters(request);
4110         boolean sessionConfigNoSurfaceSupported = cameraDeviceSetup.isSessionConfigurationSupported(
4111                 sessionConfig);
4112 
4113         // Add surfaces for the OutputConfigurations
4114         for (Pair<OutputConfiguration, Surface> c : outputConfigs2Steps) {
4115             OutputConfiguration config = c.first;
4116             Surface surface = c.second;
4117             if (config.getSurface() == null) {
4118                 config.addSurface(surface);
4119             }
4120         }
4121         boolean sessionConfigWithSurfaceSupported =
4122                 cameraDeviceSetup.isSessionConfigurationSupported(sessionConfig);
4123         assertEquals("isSessionConfigurationSupported return value shouldn't change before and "
4124                 + "after surfaces are added to SessionConfiguration",
4125                 sessionConfigNoSurfaceSupported, sessionConfigWithSurfaceSupported);
4126 
4127         return sessionConfigWithSurfaceSupported;
4128     }
4129 
4130     /**
4131      * Wait for numResultsWait frames
4132      *
4133      * @param resultListener The capture listener to get capture result back.
4134      * @param numResultsWait Number of frames to wait
4135      * @param timeout Wait timeout in ms.
4136      *
4137      * @return the last result, or {@code null} if there was none
4138      */
4139     public static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener,
4140             int numResultsWait, int timeout) {
4141         if (numResultsWait < 0 || resultListener == null) {
4142             throw new IllegalArgumentException(
4143                     "Input must be positive number and listener must be non-null");
4144         }
4145 
4146         CaptureResult result = null;
4147         for (int i = 0; i < numResultsWait; i++) {
4148             result = resultListener.getCaptureResult(timeout);
4149         }
4150 
4151         return result;
4152     }
4153 
4154     /**
4155      * Wait for any expected result key values available in a certain number of results.
4156      *
4157      * <p>
4158      * Check the result immediately if numFramesWait is 0.
4159      * </p>
4160      *
4161      * @param listener The capture listener to get capture result.
4162      * @param resultKey The capture result key associated with the result value.
4163      * @param expectedValues The list of result values to wait for;
4164      * returns immediately if the list is empty.
4165      * @param numResultsWait Number of frames to wait before timing out.
4166      * @param timeout result wait timeout in ms.
4167      * @throws TimeoutRuntimeException If more than numResultsWait results are
4168      * seen before a result with a matching value arrives, or each individual wait
4169      * for a result times out after 'timeout' ms.
4170      */
4171     public static <T> void waitForAnyResultValue(SimpleCaptureCallback listener,
4172             CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait,
4173             int timeout) {
4174         if (numResultsWait < 0 || listener == null || expectedValues == null) {
4175             throw new IllegalArgumentException(
4176                     "Input must be non-negative number and listener/expectedValues "
4177                     + "must be non-null");
4178         }
4179 
4180         int i = 0;
4181         CaptureResult result;
4182         do {
4183             result = listener.getCaptureResult(timeout);
4184             T value = result.get(resultKey);
4185             for ( T expectedValue : expectedValues) {
4186                 if (VERBOSE) {
4187                     Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: "
4188                             + value.toString());
4189                 }
4190                 if (value.equals(expectedValue)) {
4191                     return;
4192                 }
4193             }
4194         } while (i++ < numResultsWait);
4195 
4196         throw new TimeoutRuntimeException(
4197                 "Unable to get the expected result value " + expectedValues + " for key " +
4198                         resultKey.getName() + " after waiting for " + numResultsWait + " results");
4199     }
4200 
4201     /**
4202      * Wait for expected result key value available in a certain number of results.
4203      *
4204      * <p>
4205      * Check the result immediately if numFramesWait is 0.
4206      * </p>
4207      *
4208      * @param listener The capture listener to get capture result
4209      * @param resultKey The capture result key associated with the result value
4210      * @param expectedValue The result value to wait for
4211      * @param numResultsWait Number of frames to wait before timing out
4212      * @param timeout Wait time out.
4213      * @throws TimeoutRuntimeException If more than numResultsWait results are
4214      * seen before a result with the expected value arrives, or each individual wait
4215      * for a result times out after 'timeout' ms.
4216      */
4217     public static <T> void waitForResultValue(SimpleCaptureCallback listener,
4218             CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout) {
4219         List<T> expectedValues = new ArrayList<T>();
4220         expectedValues.add(expectedValue);
4221         waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait, timeout);
4222     }
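
    // Illustrative sketch for waitForResultValue() above: wait for auto-focus to report
    // FOCUSED_LOCKED within a bounded number of frames. "resultListener" is a hypothetical
    // SimpleCaptureCallback already registered with a repeating request; the frame count and
    // timeout values are arbitrary.
    //
    //     waitForResultValue(resultListener, CaptureResult.CONTROL_AF_STATE,
    //             CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED,
    //             /*numResultsWait*/ 30, /*timeout*/ 3000);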
4223 
4224     /**
4225      * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
4226      *
4227      * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
4228      * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
4229      * is unknown).</p>
4230      *
4231      * <p>This is a no-op for {@code LEGACY} devices since they don't report
4232      * the {@code aeState} result.</p>
4233      *
4234      * @param resultListener The capture listener to get capture result back.
4235      * @param numResultWaitForUnknownLatency Number of frames to wait if camera device latency is
4236      *                                       unknown.
4237      * @param staticInfo corresponding camera device static metadata.
4238      * @param settingsTimeout wait timeout for settings application in ms.
4239      * @param numResultWait Number of frames to wait before timing out.
4241      */
4242     public static void waitForAeStable(SimpleCaptureCallback resultListener,
4243             int numResultWaitForUnknownLatency, StaticMetadata staticInfo,
4244             int settingsTimeout, int numResultWait) {
4245         waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency, staticInfo,
4246                 settingsTimeout);
4247 
4248         if (!staticInfo.isHardwareLevelAtLeastLimited()) {
4249             // No-op for LEGACY devices that don't report aeState
4250             return;
4251         }
4252         List<Integer> expectedAeStates = new ArrayList<Integer>();
4253         expectedAeStates.add(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_CONVERGED));
4254         expectedAeStates.add(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED));
4255         waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
4256                 numResultWait, settingsTimeout);
4257     }
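
    // Illustrative sketch for waitForAeStable() above, run after starting a repeating preview
    // request: block until AE reports CONVERGED or FLASH_REQUIRED before issuing a still capture.
    // "resultListener", "mStaticInfo" and the numeric values are hypothetical test members.
    //
    //     waitForAeStable(resultListener, /*numResultWaitForUnknownLatency*/ 8, mStaticInfo,
    //             /*settingsTimeout*/ 3000, /*numResultWait*/ 30);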
4258 
4259     /**
4260      * Wait for enough results for settings to be applied
4261      *
4262      * @param resultListener The capture listener to get capture result back.
4263      * @param numResultWaitForUnknownLatency Number of frames to wait if camera device latency is
4264      *                                       unknown.
4265      * @param staticInfo corresponding camera device static metadata.
4266      * @param timeout wait timeout in ms.
4267      */
4268     public static void waitForSettingsApplied(SimpleCaptureCallback resultListener,
4269             int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout) {
4270         int maxLatency = staticInfo.getSyncMaxLatency();
4271         if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
4272             maxLatency = numResultWaitForUnknownLatency;
4273         }
4274         // Wait for settings to take effect
4275         waitForNumResults(resultListener, maxLatency, timeout);
4276     }
4277 
4278     public static Range<Integer> getSuitableFpsRangeForDuration(String cameraId,
4279             long frameDuration, StaticMetadata staticInfo) {
4280         // Add 0.05 here so fps values like 29.99 evaluate to 30
4281         int minBurstFps = (int) Math.floor(1e9 / frameDuration + 0.05f);
4282         boolean foundConstantMaxYUVRange = false;
4283         boolean foundYUVStreamingRange = false;
4284         boolean isExternalCamera = staticInfo.isExternalCamera();
4285         boolean isNIR = staticInfo.isNIRColorFilter();
4286 
4287         // Find suitable target FPS range - as high as possible that covers the max YUV rate
4288         // Also verify that there's a good preview rate as well
4289         List<Range<Integer> > fpsRanges = Arrays.asList(
4290                 staticInfo.getAeAvailableTargetFpsRangesChecked());
4291         Range<Integer> targetRange = null;
4292         for (Range<Integer> fpsRange : fpsRanges) {
4293             if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
4294                 foundConstantMaxYUVRange = true;
4295                 targetRange = fpsRange;
4296             } else if (isExternalCamera && fpsRange.getUpper() == minBurstFps) {
4297                 targetRange = fpsRange;
4298             }
4299             if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
4300                 foundYUVStreamingRange = true;
4301             }
4302 
4303         }
4304 
4305         if (!isExternalCamera) {
4306             assertTrue(String.format("Cam %s: Target FPS range of (%d, %d) must be supported",
4307                     cameraId, minBurstFps, minBurstFps), foundConstantMaxYUVRange);
4308         }
4309 
4310         if (!isNIR) {
4311             assertTrue(String.format(
4312                     "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
4313                     cameraId, minBurstFps), foundYUVStreamingRange);
4314         }
4315         return targetRange;
4316     }
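
    /**
     * Illustrative usage sketch (not called by any test): resolves the target FPS range for a
     * ~30fps burst (frame duration of ~33.3ms) and applies it to a request builder. The frame
     * duration value is an assumption made for this example.
     */
    private static void exampleApplySuitableFpsRange(String cameraId, StaticMetadata staticInfo,
            CaptureRequest.Builder requestBuilder) {
        final long frameDuration30Fps = 33333333L; // ~30fps expressed in nanoseconds
        Range<Integer> fpsRange =
                getSuitableFpsRangeForDuration(cameraId, frameDuration30Fps, staticInfo);
        // For external cameras the returned range may be null if no suitable range exists.
        if (fpsRange != null) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
        }
    }
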
4317     /**
4318      * Get the candidate supported zoom ratios for testing
4319      *
4320      * <p>
4321      * This function returns the boundary values of the supported zoom ratio range in addition to 1.0x
4322      * zoom ratio.
4323      * </p>
4324      */
4325     public static List<Float> getCandidateZoomRatios(StaticMetadata staticInfo) {
4326         List<Float> zoomRatios = new ArrayList<Float>();
4327         Range<Float> zoomRatioRange = staticInfo.getZoomRatioRangeChecked();
4328         zoomRatios.add(zoomRatioRange.getLower());
4329         if (zoomRatioRange.contains(1.0f) &&
4330                 1.0f - zoomRatioRange.getLower() > ZOOM_RATIO_THRESHOLD &&
4331                 zoomRatioRange.getUpper() - 1.0f > ZOOM_RATIO_THRESHOLD) {
4332             zoomRatios.add(1.0f);
4333         }
4334         zoomRatios.add(zoomRatioRange.getUpper());
4335 
4336         return zoomRatios;
4337     }
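
    /**
     * Illustrative usage sketch (not called by any test): sweeps the candidate zoom ratios
     * (minimum, 1.0x when meaningfully distinct, and maximum) on a request builder.
     */
    private static void exampleApplyCandidateZoomRatios(StaticMetadata staticInfo,
            CaptureRequest.Builder requestBuilder) {
        for (Float zoomRatio : getCandidateZoomRatios(staticInfo)) {
            requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomRatio);
            // A real test would submit a capture here and verify the zoom ratio in the result.
        }
    }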
4338 
4339     /**
4340      * Get the primary rear facing camera from an ID list
4341      */
4342     public static String getPrimaryRearCamera(CameraManager manager, String[] cameraIds)
4343             throws Exception {
4344         return getPrimaryCamera(manager, cameraIds, CameraCharacteristics.LENS_FACING_BACK);
4345     }
4346 
4347     /**
4348      * Get the primary front facing camera from an ID list
4349      */
4350     public static String getPrimaryFrontCamera(CameraManager manager, String[] cameraIds)
4351             throws Exception {
4352         return getPrimaryCamera(manager, cameraIds, CameraCharacteristics.LENS_FACING_FRONT);
4353     }
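
    /**
     * Illustrative usage sketch (not called by any test): looks up the primary rear and front
     * cameras among all camera IDs reported by the camera service.
     */
    private static void exampleFindPrimaryCameras(CameraManager manager) throws Exception {
        String[] cameraIds = manager.getCameraIdList();
        String primaryRear = getPrimaryRearCamera(manager, cameraIds);
        String primaryFront = getPrimaryFrontCamera(manager, cameraIds);
        // Either value may be null if no camera with the corresponding facing exists.
        Log.i(TAG, "Primary rear: " + primaryRear + ", primary front: " + primaryFront);
    }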
4354 
4355     private static String getPrimaryCamera(CameraManager manager,
4356             String[] cameraIds, Integer facing) throws Exception {
4357         if (cameraIds == null) {
4358             return null;
4359         }
4360 
4361         for (String id : cameraIds) {
4362             if (isPrimaryCamera(manager, id, facing)) {
4363                 return id;
4364             }
4365         }
4366 
4367         return null;
4368     }
4369 
4370     /**
4371      * Check whether a camera Id is a primary rear facing camera
4372      */
4373     public static boolean isPrimaryRearFacingCamera(CameraManager manager, String cameraId)
4374             throws Exception {
4375         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_BACK);
4376     }
4377 
4378     /**
4379      * Check whether a camera Id is a primary front facing camera
4380      */
4381     public static boolean isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)
4382             throws Exception {
4383         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_FRONT);
4384     }
4385 
4386     private static boolean isPrimaryCamera(CameraManager manager, String cameraId,
4387             Integer lensFacing) throws Exception {
4388         CameraCharacteristics characteristics;
4389         Integer facing;
4390 
4391         String [] ids = manager.getCameraIdList();
4392         for (String id : ids) {
4393             characteristics = manager.getCameraCharacteristics(id);
4394             facing = characteristics.get(CameraCharacteristics.LENS_FACING);
4395             if (lensFacing.equals(facing)) {
4396                 if (cameraId.equals(id)) {
4397                     return true;
4398                 } else {
4399                     return false;
4400                 }
4401             }
4402         }
4403         return false;
4404     }
4405 
4406     /**
4407      * Verifies the camera in this listener was opened exactly once.
4408      *
4409      * <p>This assumes that no other action to the camera has been done (e.g.
4410      * it hasn't been configured, or closed, or disconnected). Verification is
4411      * performed immediately without any timeouts.</p>
4412      *
4413      * <p>This checks that the state has previously changed to opened. Any other state
4414      * transitions will fail the verification. A test failure is thrown if verification fails.</p>
4415      *
4416      * @param cameraId Camera identifier
4417      * @param listener Listener which was passed to {@link CameraManager#openCamera}
4418      *
4419      * @return The camera device (non-{@code null}).
4420      */
4421     public static CameraDevice verifyCameraStateOpened(String cameraId,
4422             MockStateCallback listener) {
4423         ArgumentCaptor<CameraDevice> argument =
4424                 ArgumentCaptor.forClass(CameraDevice.class);
4425         InOrder inOrder = inOrder(listener);
4426 
4427         /**
4428          * State transitions (in that order):
4429          *  1) onOpened
4430          *
4431          * No other transitions must occur for successful #openCamera
4432          */
4433         inOrder.verify(listener)
4434                 .onOpened(argument.capture());
4435 
4436         CameraDevice camera = argument.getValue();
4437         assertNotNull(
4438                 String.format("Failed to open camera device ID: %s", cameraId),
4439                 camera);
4440 
4441         // Do not use inOrder here since that would skip anything called before onOpened
4442         verifyNoMoreInteractions(listener);
4443 
4444         return camera;
4445     }
4446 
4447     public static void verifySingleAvailabilityCbsReceived(
4448             LinkedBlockingQueue<String> expectedEventQueue,
4449             LinkedBlockingQueue<String> unExpectedEventQueue, String expectedId,
4450             String expectedStr, String unExpectedStr) throws Exception {
4451         String candidateId = expectedEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4452                 java.util.concurrent.TimeUnit.MILLISECONDS);
4453         assertNotNull("No " + expectedStr + " notice for expected ID " + expectedId, candidateId);
4454         assertTrue("Received " + expectedStr + " notice for wrong ID, " + "expected "
4455                 + expectedId + ", got " + candidateId, expectedId.equals(candidateId));
4456         assertTrue("Received > 1 " + expectedStr + " callback for id " + expectedId,
4457                 expectedEventQueue.size() == 0);
4458         assertTrue(unExpectedStr + " events received unexpectedly",
4459                 unExpectedEventQueue.size() == 0);
4460     }
4461 
4462     public static <T> void verifyAvailabilityCbsReceived(HashSet<T> expectedCameras,
4463             LinkedBlockingQueue<T> expectedEventQueue, LinkedBlockingQueue<T> unExpectedEventQueue,
4464             boolean available) throws Exception {
4465         while (expectedCameras.size() > 0) {
4466             T id = expectedEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4467                     java.util.concurrent.TimeUnit.MILLISECONDS);
4468             assertTrue("Did not receive initial " + (available ? "available" : "unavailable")
4469                     + " notices for some cameras", id != null);
4470             assertTrue("Received initial " + (available ? "available" : "unavailable")
4471                     + " notice for wrong camera " + id, expectedCameras.contains(id));
4472             expectedCameras.remove(id);
4473         }
4474         // Verify no unexpected unavailable/available cameras were reported
4475         if (unExpectedEventQueue != null) {
4476             assertTrue("Received unexpected initial "
4477                     + (available ? "unavailable" : "available"),
4478                     unExpectedEventQueue.size() == 0);
4479         }
4480     }
4481 
4482     /**
4483      * This function polls on the event queue to get unavailable physical camera IDs belonging
4484      * to a particular logical camera. The event queue is drained before the function returns.
4485      *
4486      * @param queue The event queue capturing unavailable physical cameras
4487      * @param cameraId The logical camera ID
4488      *
4489      * @return The currently unavailable physical cameras
4490      */
4491     private static Set<String> getUnavailablePhysicalCamerasAndDrain(
4492             LinkedBlockingQueue<Pair<String, String>> queue, String cameraId) throws Exception {
4493         Set<String> unavailablePhysicalCameras = new HashSet<String>();
4494 
4495         while (true) {
4496             Pair<String, String> unavailableIdCombo = queue.poll(
4497                     AVAILABILITY_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
4498             if (unavailableIdCombo == null) {
4499                 // No more entries in the queue. Break out of the loop and return.
4500                 break;
4501             }
4502 
4503             if (cameraId.equals(unavailableIdCombo.first)) {
4504                 unavailablePhysicalCameras.add(unavailableIdCombo.second);
4505             }
4506         }
4507 
4508         return unavailablePhysicalCameras;
4509     }
4510 
4511     /**
4512      * Get the unavailable physical cameras based on onPhysicalCameraUnavailable callback.
4513      */
4514     public static Set<Pair<String, String>> getUnavailablePhysicalCameras(CameraManager manager,
4515             Handler handler) throws Exception {
4516         final Set<Pair<String, String>> ret = new HashSet<>();
4517         final ConditionVariable cv = new ConditionVariable();
4518 
4519         CameraManager.AvailabilityCallback ac = new CameraManager.AvailabilityCallback() {
4520             @Override
4521             public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) {
4522                 synchronized (ret) {
4523                     ret.add(new Pair<String, String>(cameraId, physicalCameraId));
4524                 }
4525                 cv.open();
4526             }
4527         };
4528         manager.registerAvailabilityCallback(ac, handler);
4529 
4530         // Keep collecting physical camera unavailability callbacks until no more arrive
4531         while (cv.block(AVAILABILITY_TIMEOUT_MS)) {
4532             // cv.block() returns true once open() has been called, and false on timeout;
4533             // reset the condition and wait for the next callback.
4534             cv.close();
4535         }
4536 
4537         manager.unregisterAvailabilityCallback(ac);
4538 
4539         synchronized (ret) {
4540             return ret;
4541         }
4542     }
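
    /**
     * Illustrative usage sketch (not called by any test): logs the currently unavailable
     * physical cameras as (logical ID, physical ID) pairs.
     */
    private static void exampleLogUnavailablePhysicalCameras(CameraManager manager,
            Handler handler) throws Exception {
        Set<Pair<String, String>> unavailable = getUnavailablePhysicalCameras(manager, handler);
        for (Pair<String, String> idPair : unavailable) {
            Log.i(TAG, "Physical camera " + idPair.second + " of logical camera " + idPair.first
                    + " is currently unavailable");
        }
    }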
4543 
4544     public static void testPhysicalCameraAvailabilityConsistencyHelper(
4545             String[] cameraIds, CameraManager manager,
4546             Handler handler, boolean expectInitialCallbackAfterOpen) throws Throwable {
4547         final LinkedBlockingQueue<String> availableEventQueue = new LinkedBlockingQueue<>();
4548         final LinkedBlockingQueue<String> unavailableEventQueue = new LinkedBlockingQueue<>();
4549         final LinkedBlockingQueue<Pair<String, String>> unavailablePhysicalCamEventQueue =
4550                 new LinkedBlockingQueue<>();
4551         CameraManager.AvailabilityCallback ac = new CameraManager.AvailabilityCallback() {
4552             @Override
4553             public void onCameraAvailable(String cameraId) {
4554                 super.onCameraAvailable(cameraId);
4555                 availableEventQueue.offer(cameraId);
4556             }
4557 
4558             @Override
4559             public void onCameraUnavailable(String cameraId) {
4560                 super.onCameraUnavailable(cameraId);
4561                 unavailableEventQueue.offer(cameraId);
4562             }
4563 
4564             @Override
4565             public void onPhysicalCameraAvailable(String cameraId, String physicalCameraId) {
4566                 super.onPhysicalCameraAvailable(cameraId, physicalCameraId);
4567                 unavailablePhysicalCamEventQueue.remove(new Pair<>(cameraId, physicalCameraId));
4568             }
4569 
4570             @Override
4571             public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) {
4572                 super.onPhysicalCameraUnavailable(cameraId, physicalCameraId);
4573                 unavailablePhysicalCamEventQueue.offer(new Pair<>(cameraId, physicalCameraId));
4574             }
4575         };
4576 
4577         String[] cameras = cameraIds;
4578         if (cameras.length == 0) {
4579             Log.i(TAG, "Skipping testPhysicalCameraAvailabilityConsistency, no cameras");
4580             return;
4581         }
4582 
4583         for (String cameraId : cameras) {
4584             CameraCharacteristics ch = manager.getCameraCharacteristics(cameraId);
4585             StaticMetadata staticInfo = new StaticMetadata(ch);
4586             if (!staticInfo.isLogicalMultiCamera()) {
4587                 // Test is only applicable for logical multi-camera.
4588                 continue;
4589             }
4590 
4591             // Get initial physical unavailable callbacks without opening camera
4592             manager.registerAvailabilityCallback(ac, handler);
4593             Set<String> unavailablePhysicalCameras = getUnavailablePhysicalCamerasAndDrain(
4594                     unavailablePhysicalCamEventQueue, cameraId);
4595 
4596             // Open camera
4597             MockStateCallback mockListener = MockStateCallback.mock();
4598             BlockingStateCallback cameraListener = new BlockingStateCallback(mockListener);
4599             manager.openCamera(cameraId, cameraListener, handler);
4600             // Block until opened
4601             cameraListener.waitForState(BlockingStateCallback.STATE_OPENED,
4602                     CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
4603             // Then verify only open happened, and get the camera handle
4604             CameraDevice camera = CameraTestUtils.verifyCameraStateOpened(cameraId, mockListener);
4605 
4606             // The camera should have transitioned from available to unavailable.
4607             String candidateUnavailableId = unavailableEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4608                     java.util.concurrent.TimeUnit.MILLISECONDS);
4609             assertNotNull("No unavailable notice for expected ID " + cameraId,
4610                     candidateUnavailableId);
4611             assertTrue("Received unavailable notice for wrong ID, "
4612                     + "expected " + cameraId + ", got " + candidateUnavailableId,
4613                     cameraId.equals(candidateUnavailableId));
4614             assertTrue("Received > 1 unavailable callback for id " + cameraId,
4615                     unavailableEventQueue.size() == 0);
4616             availableEventQueue.clear();
4617             unavailableEventQueue.clear();
4618 
4619             manager.unregisterAvailabilityCallback(ac);
4620             // Get physical unavailable callbacks while camera is open
4621             manager.registerAvailabilityCallback(ac, handler);
4622             HashSet<String> expectedAvailableCameras = new HashSet<String>(Arrays.asList(cameras));
4623             expectedAvailableCameras.remove(cameraId);
4624             HashSet<String> expectedUnavailableCameras =
4625                     new HashSet<String>(Arrays.asList(cameraId));
4626             CameraTestUtils.verifyAvailabilityCbsReceived(expectedAvailableCameras,
4627                     availableEventQueue, null, /*available*/ true);
4628             CameraTestUtils.verifyAvailabilityCbsReceived(expectedUnavailableCameras,
4629                     unavailableEventQueue, null, /*available*/ false);
4630             Set<String> unavailablePhysicalCamerasWhileOpen = getUnavailablePhysicalCamerasAndDrain(
4631                     unavailablePhysicalCamEventQueue, cameraId);
4632             if (expectInitialCallbackAfterOpen) {
4633                 assertTrue("The unavailable physical cameras must be the same between before open "
4634                         + unavailablePhysicalCameras.toString()  + " and after open "
4635                         + unavailablePhysicalCamerasWhileOpen.toString(),
4636                         unavailablePhysicalCameras.equals(unavailablePhysicalCamerasWhileOpen));
4637             } else {
4638                 assertTrue("The physical camera unavailability callback must not be called when "
4639                         + "the logical camera is open",
4640                         unavailablePhysicalCamerasWhileOpen.isEmpty());
4641             }
4642 
4643             // Close camera device
4644             camera.close();
4645             cameraListener.waitForState(BlockingStateCallback.STATE_CLOSED,
4646                     CameraTestUtils.CAMERA_CLOSE_TIMEOUT_MS);
4647             CameraTestUtils.verifySingleAvailabilityCbsReceived(availableEventQueue,
4648                     unavailableEventQueue, cameraId, "availability", "Unavailability");
4649 
4650             // Get physical unavailable callbacks after opening and closing camera
4651             Set<String> unavailablePhysicalCamerasAfterClose =
4652                     getUnavailablePhysicalCamerasAndDrain(
4653                             unavailablePhysicalCamEventQueue, cameraId);
4654 
4655             assertTrue("The unavailable physical cameras must be the same between before open "
4656                     + unavailablePhysicalCameras.toString()  + " and after close "
4657                     + unavailablePhysicalCamerasAfterClose.toString(),
4658                     unavailablePhysicalCameras.equals(unavailablePhysicalCamerasAfterClose));
4659 
4660             manager.unregisterAvailabilityCallback(ac);
4661         }
4662     }
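
    /**
     * Illustrative usage sketch (not called by any test): a typical invocation of the
     * consistency helper over all cameras visible to the camera service. Whether an initial
     * callback is expected after open depends on the behavior the calling test verifies; the
     * value below is an example assumption.
     */
    private static void examplePhysicalCameraAvailabilityConsistency(CameraManager manager,
            Handler handler) throws Throwable {
        testPhysicalCameraAvailabilityConsistencyHelper(manager.getCameraIdList(), manager,
                handler, /*expectInitialCallbackAfterOpen*/ true);
    }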
4663 
4664     /**
4665      * Simple holder for resolutions to use for different camera outputs and size limits.
4666      */
4667     public static class MaxStreamSizes {
4668         // Format shorthands
4669         static final int PRIV = ImageFormat.PRIVATE;
4670         static final int JPEG = ImageFormat.JPEG;
4671         static final int YUV  = ImageFormat.YUV_420_888;
4672         static final int RAW  = ImageFormat.RAW_SENSOR;
4673         static final int Y8   = ImageFormat.Y8;
4674         static final int HEIC = ImageFormat.HEIC;
4675         static final int JPEG_R = ImageFormat.JPEG_R;
4676 
4677         // Max resolution output indices
4678         static final int PREVIEW = 0;
4679         static final int RECORD  = 1;
4680         static final int MAXIMUM = 2;
4681         static final int VGA = 3;
4682         static final int VGA_FULL_FOV = 4;
4683         static final int MAX_30FPS = 5;
4684         static final int S720P = 6;
4685         static final int S1440P_4_3 = 7; // 4:3
4686         static final int MAX_RES = 8;
4687         static final int S1080P = 9;
4688         static final int S1080P_4_3 = 10;
4689         static final int S1440P_16_9 = 11;
4690         static final int XVGA = 12;
4691         static final int MAXIMUM_16_9 = 13;
4692         static final int MAXIMUM_4_3 = 14;
4693         static final int UHD = 15;
4694         static final int RESOLUTION_COUNT = 16;
4695 
4696         // Max resolution input indices
4697         static final int INPUT_MAXIMUM = 0;
4698         static final int INPUT_MAX_RES = 1;
4699         static final int INPUT_RESOLUTION_COUNT = 2;
4700 
4701         static final Size S_1280_720 = new Size(1280, 720);   // 16:9
4702 
4703         static final Size S_1024_768 = new Size(1024, 768);   // 4:3
4704 
4705         static final Size S_1920_1080 = new Size(1920, 1080); // 16:9
4706 
4707         static final Size S_1440_1080 = new Size(1440, 1080); // 4:3
4708 
4709         static final Size S_2560_1440 = new Size(2560, 1440); // 16:9
4710 
4711         static final Size S_1920_1440 = new Size(1920, 1440); // 4:3
4712 
4713         static final Size S_3840_2160 = new Size(3840, 2160); // 16:9
4714 
4715         static final long FRAME_DURATION_30FPS_NSEC = (long) 1e9 / 30;
4716 
4717         static final int USE_CASE_PREVIEW =
4718                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW;
4719         static final int USE_CASE_VIDEO_RECORD =
4720                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD;
4721         static final int USE_CASE_STILL_CAPTURE =
4722                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE;
4723         static final int USE_CASE_PREVIEW_VIDEO_STILL =
4724                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
4725         static final int USE_CASE_VIDEO_CALL =
4726                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
4727         static final int USE_CASE_CROPPED_RAW =
4728                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
4729 
4730         // Note: This must match the required stream combinations defined in
4731         // CameraCharacteristics#INFO_SESSION_CONFIGURATION_QUERY_VERSION.
4732         private static final int[][] QUERY_COMBINATIONS = {
4733             {PRIV, S1080P},
4734             {PRIV, S720P},
4735             {PRIV, S1080P,  JPEG, MAXIMUM_16_9},
4736             {PRIV, S1080P,  JPEG, UHD},
4737             {PRIV, S1080P,  JPEG, S1440P_16_9},
4738             {PRIV, S1080P,  JPEG, S1080P},
4739             {PRIV, S1080P,  PRIV, UHD},
4740             {PRIV, S720P,   JPEG, MAXIMUM_16_9},
4741             {PRIV, S720P,   JPEG, UHD},
4742             {PRIV, S720P,   JPEG, S1080P},
4743             {PRIV, XVGA,    JPEG, MAXIMUM_4_3},
4744             {PRIV, S1080P_4_3, JPEG, MAXIMUM_4_3},
4745             {PRIV, S1080P,  JPEG_R, MAXIMUM_16_9},
4746             {PRIV, S1080P,  JPEG_R, UHD},
4747             {PRIV, S1080P,  JPEG_R, S1440P_16_9},
4748             {PRIV, S1080P,  JPEG_R, S1080P},
4749             {PRIV, S720P,   JPEG_R, MAXIMUM_16_9},
4750             {PRIV, S720P,   JPEG_R, UHD},
4751             {PRIV, S720P,   JPEG_R, S1080P},
4752             {PRIV, XVGA,    JPEG_R, MAXIMUM_4_3},
4753             {PRIV, S1080P_4_3, JPEG_R, MAXIMUM_4_3},
4754         };
4755 
4756         private final Size[] mMaxPrivSizes = new Size[RESOLUTION_COUNT];
4757         private final Size[] mMaxJpegSizes = new Size[RESOLUTION_COUNT];
4758         private final Size[] mMaxJpegRSizes = new Size[RESOLUTION_COUNT];
4759         private final Size[] mMaxYuvSizes = new Size[RESOLUTION_COUNT];
4760         private final Size[] mMaxY8Sizes = new Size[RESOLUTION_COUNT];
4761         private final Size[] mMaxHeicSizes = new Size[RESOLUTION_COUNT];
4762         private final Size mMaxRawSize;
4763         private final Size mMaxResolutionRawSize;
4764 
4765         private final Size[] mMaxPrivInputSizes = new Size[INPUT_RESOLUTION_COUNT];
4766         private final Size[] mMaxYuvInputSizes = new Size[INPUT_RESOLUTION_COUNT];
4767         private final Size mMaxInputY8Size;
4768         private int[][] mQueryableCombinations;
4769 
4770         public MaxStreamSizes(StaticMetadata sm, String cameraId, Context context) {
4771             this(sm, cameraId, context, /*matchSize*/false);
4772         }
4773 
4774         public MaxStreamSizes(StaticMetadata sm, String cameraId, Context context,
4775                 boolean matchSize) {
4776             Size[] privSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.PRIVATE,
4777                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4778             Size[] yuvSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.YUV_420_888,
4779                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4780 
4781             Size[] y8Sizes = sm.getAvailableSizesForFormatChecked(ImageFormat.Y8,
4782                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4783             Size[] jpegSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
4784                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4785             Size[] jpegRSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.JPEG_R,
4786                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4787             Size[] rawSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.RAW_SENSOR,
4788                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4789             Size[] heicSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.HEIC,
4790                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4791 
4792             Size maxPreviewSize = getMaxPreviewSize(context, cameraId);
4793 
4794             StreamConfigurationMap configs = sm.getCharacteristics().get(
4795                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
4796 
4797             StreamConfigurationMap maxResConfigs = sm.getCharacteristics().get(
4798                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
4799 
4800             mMaxRawSize = (rawSizes.length != 0) ? CameraTestUtils.getMaxSize(rawSizes) : null;
4801             mMaxResolutionRawSize = sm.isUltraHighResolutionSensor()
4802                     ? CameraTestUtils.getMaxSize(
4803                             maxResConfigs.getOutputSizes(ImageFormat.RAW_SENSOR))
4804                     : null;
4805 
4806             if (sm.isColorOutputSupported()) {
4807                 // We don't include JPEG sizes capped at PREVIEW since for MPC 12+ devices, JPEG
4808                 // sizes are necessarily > 1080p. Also the mandatory stream combinations have no
4809                 // JPEG streams capped at PREVIEW.
4810                 mMaxPrivSizes[PREVIEW] = CameraTestUtils.getMaxSizeWithBound(privSizes,
4811                         maxPreviewSize);
4812                 mMaxYuvSizes[PREVIEW]  = CameraTestUtils.getMaxSizeWithBound(yuvSizes,
4813                         maxPreviewSize);
4814 
4815                 if (sm.isExternalCamera()) {
4816                     mMaxPrivSizes[RECORD] = getMaxExternalRecordingSize(cameraId, configs);
4817                     mMaxYuvSizes[RECORD]  = getMaxExternalRecordingSize(cameraId, configs);
4818                     mMaxJpegSizes[RECORD] = getMaxExternalRecordingSize(cameraId, configs);
4819                 } else {
4820                     mMaxPrivSizes[RECORD] = getMaxRecordingSize(cameraId);
4821                     mMaxYuvSizes[RECORD]  = getMaxRecordingSize(cameraId);
4822                     mMaxJpegSizes[RECORD] = getMaxRecordingSize(cameraId);
4823                 }
4824 
4825                 if (sm.isUltraHighResolutionSensor()) {
4826                     mMaxYuvSizes[MAX_RES] = CameraTestUtils.getMaxSize(
4827                             maxResConfigs.getOutputSizes(ImageFormat.YUV_420_888));
4828                     mMaxJpegSizes[MAX_RES] = CameraTestUtils.getMaxSize(
4829                             maxResConfigs.getOutputSizes(ImageFormat.JPEG));
4830                 }
4831 
4832                 mMaxPrivSizes[MAXIMUM] = CameraTestUtils.getMaxSize(privSizes);
4833                 mMaxYuvSizes[MAXIMUM] = CameraTestUtils.getMaxSize(yuvSizes);
4834                 mMaxJpegSizes[MAXIMUM] = CameraTestUtils.getMaxSize(jpegSizes);
4835 
4836                 float aspectRatio43 = 1.0f * 4 / 3;
4837                 mMaxPrivSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(privSizes, aspectRatio43);
4838                 mMaxYuvSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(yuvSizes, aspectRatio43);
4839                 mMaxJpegSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(jpegSizes, aspectRatio43);
4840 
4841                 float aspectRatio169 = 1.0f * 16 / 9;
4842                 mMaxPrivSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(privSizes, aspectRatio169);
4843                 mMaxYuvSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(yuvSizes, aspectRatio169);
4844                 mMaxJpegSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(jpegSizes, aspectRatio169);
4845 
4846                 // Must always be supported, add unconditionally
4847                 final Size vgaSize = new Size(640, 480);
4848                 mMaxPrivSizes[VGA] = vgaSize;
4849                 mMaxYuvSizes[VGA] = vgaSize;
4850                 mMaxJpegSizes[VGA] = vgaSize;
4851 
4852                 final Size s1440p43Size = S_1920_1440;
4853                 mMaxPrivSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4854                         configs.getOutputSizes(ImageFormat.PRIVATE), s1440p43Size);
4855                 mMaxYuvSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4856                         configs.getOutputSizes(ImageFormat.YUV_420_888), s1440p43Size);
4857                 mMaxJpegSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4858                         configs.getOutputSizes(ImageFormat.JPEG), s1440p43Size);
4859 
4860                 final Size s720pSize = S_1280_720;
4861                 final Size xvgaSize = S_1024_768;
4862                 final Size s1080pSize = S_1920_1080;
4863                 final Size s1080p43Size = S_1440_1080;
4864                 final Size s1440p169Size = S_2560_1440;
4865                 final Size uhdSize = S_3840_2160;
4866                 if (!matchSize) {
4867                     // Skip JPEG for 720p, XVGA, and S1080P_4_3, because those resolutions
4868                     // are not mandatory JPEG resolutions, and they could be filtered out
4869                     // for MediaPerformance class.
4870                     mMaxPrivSizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
4871                             configs.getOutputSizes(ImageFormat.PRIVATE), s720pSize);
4872                     mMaxYuvSizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
4873                             configs.getOutputSizes(ImageFormat.YUV_420_888), s720pSize);
4874 
4875                     mMaxPrivSizes[XVGA] = CameraTestUtils.getMaxSizeWithBound(
4876                             configs.getOutputSizes(ImageFormat.PRIVATE), xvgaSize);
4877                     mMaxYuvSizes[XVGA] = CameraTestUtils.getMaxSizeWithBound(
4878                             configs.getOutputSizes(ImageFormat.YUV_420_888), xvgaSize);
4879 
4880                     mMaxPrivSizes[S1080P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4881                             configs.getOutputSizes(ImageFormat.PRIVATE), s1080p43Size);
4882                     mMaxYuvSizes[S1080P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4883                             configs.getOutputSizes(ImageFormat.YUV_420_888), s1080p43Size);
4884 
4885                     mMaxPrivSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4886                             configs.getOutputSizes(ImageFormat.PRIVATE), s1080pSize);
4887                     mMaxYuvSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4888                             configs.getOutputSizes(ImageFormat.YUV_420_888), s1080pSize);
4889                     mMaxJpegSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4890                             configs.getOutputSizes(ImageFormat.JPEG), s1080pSize);
4891 
4892                     mMaxPrivSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4893                             configs.getOutputSizes(ImageFormat.PRIVATE), s1440p169Size);
4894                     mMaxYuvSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4895                             configs.getOutputSizes(ImageFormat.YUV_420_888), s1440p169Size);
4896                     mMaxJpegSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4897                             configs.getOutputSizes(ImageFormat.JPEG), s1440p169Size);
4898 
4899                     mMaxPrivSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4900                             configs.getOutputSizes(ImageFormat.PRIVATE), uhdSize);
4901                     mMaxYuvSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4902                             configs.getOutputSizes(ImageFormat.YUV_420_888), uhdSize);
4903                     mMaxJpegSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4904                             configs.getOutputSizes(ImageFormat.JPEG), uhdSize);
4905                 } else {
4906                     mMaxPrivSizes[S720P] = s720pSize;
4907                     mMaxYuvSizes[S720P] = s720pSize;
4908                     mMaxJpegSizes[S720P] = s720pSize;
4909 
4910                     mMaxPrivSizes[XVGA] = xvgaSize;
4911                     mMaxYuvSizes[XVGA] = xvgaSize;
4912                     mMaxJpegSizes[XVGA] = xvgaSize;
4913 
4914                     mMaxPrivSizes[S1080P] = s1080pSize;
4915                     mMaxYuvSizes[S1080P] = s1080pSize;
4916                     mMaxJpegSizes[S1080P] = s1080pSize;
4917 
4918                     mMaxPrivSizes[S1080P_4_3] = s1080p43Size;
4919                     mMaxYuvSizes[S1080P_4_3] = s1080p43Size;
4920                     mMaxJpegSizes[S1080P_4_3] = s1080p43Size;
4921 
4922                     mMaxPrivSizes[S1440P_16_9] = s1440p169Size;
4923                     mMaxYuvSizes[S1440P_16_9] = s1440p169Size;
4924                     mMaxJpegSizes[S1440P_16_9] = s1440p169Size;
4925 
4926                     mMaxPrivSizes[UHD] = uhdSize;
4927                     mMaxYuvSizes[UHD] = uhdSize;
4928                     mMaxJpegSizes[UHD] = uhdSize;
4929                 }
4930                 if (sm.isJpegRSupported()) {
4931                     mMaxJpegRSizes[MAXIMUM] = CameraTestUtils.getMaxSize(jpegRSizes);
4932                     mMaxJpegRSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(
4933                             jpegRSizes, aspectRatio43);
4934                     mMaxJpegRSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(
4935                             jpegRSizes, aspectRatio169);
4936                     if (!matchSize) {
4937                         mMaxJpegRSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4938                                 configs.getOutputSizes(ImageFormat.JPEG_R), s1080pSize);
4939                         mMaxJpegRSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4940                                 configs.getOutputSizes(ImageFormat.JPEG_R), s1440p169Size);
4941                         mMaxJpegRSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4942                                 configs.getOutputSizes(ImageFormat.JPEG_R), s1440p43Size);
4943                         mMaxJpegRSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4944                                 configs.getOutputSizes(ImageFormat.JPEG_R), uhdSize);
4945                     } else {
4946                         mMaxJpegRSizes[S720P] = s720pSize;
4947                         mMaxJpegRSizes[XVGA] = xvgaSize;
4948                         mMaxJpegRSizes[S1080P] = s1080pSize;
4949                         mMaxJpegRSizes[S1080P_4_3] = s1080p43Size;
4950                         mMaxJpegRSizes[S1440P_16_9] = s1440p169Size;
4951                         mMaxJpegRSizes[UHD] = uhdSize;
4952                     }
4953                     mQueryableCombinations = QUERY_COMBINATIONS;
4954                 } else {
4955                     // JPEG_R is not supported. Remove all combinations containing JPEG_R
4956                     List<int[]> combinationsMinusJpegR = new ArrayList<int[]>();
4957                     for (int i = 0; i < QUERY_COMBINATIONS.length; i++) {
4958                         boolean hasJpegR = false;
4959                         for (int j = 0; j < QUERY_COMBINATIONS[i].length; j += 2) {
4960                             if (QUERY_COMBINATIONS[i][j] == JPEG_R) {
4961                                 hasJpegR = true;
4962                                 break;
4963                             }
4964                         }
4965 
4966                         if (!hasJpegR) {
4967                             combinationsMinusJpegR.add(QUERY_COMBINATIONS[i]);
4968                         }
4969                     }
4970                     mQueryableCombinations = combinationsMinusJpegR.toArray(int[][]::new);
4971                 }
4972 
4973                 if (sm.isMonochromeWithY8()) {
4974                     mMaxY8Sizes[PREVIEW]  = CameraTestUtils.getMaxSizeWithBound(
4975                             y8Sizes, maxPreviewSize);
4976                     if (sm.isExternalCamera()) {
4977                         mMaxY8Sizes[RECORD]  = getMaxExternalRecordingSize(cameraId, configs);
4978                     } else {
4979                         mMaxY8Sizes[RECORD]  = getMaxRecordingSize(cameraId);
4980                     }
4981                     mMaxY8Sizes[MAXIMUM] = CameraTestUtils.getMaxSize(y8Sizes);
4982                     mMaxY8Sizes[VGA] = vgaSize;
4983                     mMaxY8Sizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
4984                             configs.getOutputSizes(ImageFormat.Y8), s720pSize);
4985                     mMaxY8Sizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4986                             configs.getOutputSizes(ImageFormat.Y8), s1440p43Size);
4987                 }
4988 
4989                 if (sm.isHeicSupported()) {
4990                     mMaxHeicSizes[PREVIEW] = CameraTestUtils.getMaxSizeWithBound(
4991                             heicSizes, maxPreviewSize);
4992                     mMaxHeicSizes[RECORD] = getMaxRecordingSize(cameraId);
4993                     mMaxHeicSizes[MAXIMUM] = CameraTestUtils.getMaxSize(heicSizes);
4994                     mMaxHeicSizes[VGA] = vgaSize;
4995                     mMaxHeicSizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
4996                             configs.getOutputSizes(ImageFormat.HEIC), s720pSize);
4997                     mMaxHeicSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4998                             configs.getOutputSizes(ImageFormat.HEIC), s1440p43Size);
4999                 }
5000             }
5001             if (sm.isColorOutputSupported() && !sm.isHardwareLevelLegacy()) {
5002                 // VGA resolution, but with aspect ratio matching full res FOV
5003                 float fullFovAspect = mMaxYuvSizes[MAXIMUM].getWidth()
5004                         / (float) mMaxYuvSizes[MAXIMUM].getHeight();
5005                 Size vgaFullFovSize = new Size(640, (int) (640 / fullFovAspect));
5006 
5007                 mMaxPrivSizes[VGA_FULL_FOV] = vgaFullFovSize;
5008                 mMaxYuvSizes[VGA_FULL_FOV] = vgaFullFovSize;
5009                 mMaxJpegSizes[VGA_FULL_FOV] = vgaFullFovSize;
5010                 if (sm.isMonochromeWithY8()) {
5011                     mMaxY8Sizes[VGA_FULL_FOV] = vgaFullFovSize;
5012                 }
5013 
5014                 // Max resolution that runs at 30fps
5015 
5016                 Size maxPriv30fpsSize = null;
5017                 Size maxYuv30fpsSize = null;
5018                 Size maxY830fpsSize = null;
5019                 Size maxJpeg30fpsSize = null;
5020                 Comparator<Size> comparator = new SizeComparator();
5021                 for (Map.Entry<Size, Long> e :
5022                              sm.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.PRIVATE)
5023                              .entrySet()) {
5024                     Size s = e.getKey();
5025                     Long minDuration = e.getValue();
5026                     Log.d(TAG, String.format("Priv Size: %s, duration %d limit %d", s, minDuration,
5027                                 FRAME_DURATION_30FPS_NSEC));
5028                     if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5029                         if (maxPriv30fpsSize == null
5030                                 || comparator.compare(maxPriv30fpsSize, s) < 0) {
5031                             maxPriv30fpsSize = s;
5032                         }
5033                     }
5034                 }
5035                 assertTrue("No PRIVATE resolution available at 30fps!", maxPriv30fpsSize != null);
5036 
5037                 for (Map.Entry<Size, Long> e :
5038                              sm.getAvailableMinFrameDurationsForFormatChecked(
5039                                      ImageFormat.YUV_420_888)
5040                              .entrySet()) {
5041                     Size s = e.getKey();
5042                     Long minDuration = e.getValue();
5043                     Log.d(TAG, String.format("YUV Size: %s, duration %d limit %d", s, minDuration,
5044                                 FRAME_DURATION_30FPS_NSEC));
5045                     if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5046                         if (maxYuv30fpsSize == null
5047                                 || comparator.compare(maxYuv30fpsSize, s) < 0) {
5048                             maxYuv30fpsSize = s;
5049                         }
5050                     }
5051                 }
5052                 assertTrue("No YUV_420_888 resolution available at 30fps!",
5053                         maxYuv30fpsSize != null);
5054 
5055                 if (sm.isMonochromeWithY8()) {
5056                     for (Map.Entry<Size, Long> e :
5057                                  sm.getAvailableMinFrameDurationsForFormatChecked(
5058                                          ImageFormat.Y8)
5059                                  .entrySet()) {
5060                         Size s = e.getKey();
5061                         Long minDuration = e.getValue();
5062                         Log.d(TAG, String.format("Y8 Size: %s, duration %d limit %d",
5063                                 s, minDuration, FRAME_DURATION_30FPS_NSEC));
5064                         if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5065                             if (maxY830fpsSize == null
5066                                     || comparator.compare(maxY830fpsSize, s) < 0) {
5067                                 maxY830fpsSize = s;
5068                             }
5069                         }
5070                     }
5071                     assertTrue("No Y8 resolution available at 30fps!", maxY830fpsSize != null);
5072                 }
5073 
5074                 for (Map.Entry<Size, Long> e :
5075                              sm.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.JPEG)
5076                              .entrySet()) {
5077                     Size s = e.getKey();
5078                     Long minDuration = e.getValue();
5079                     Log.d(TAG, String.format("JPEG Size: %s, duration %d limit %d", s, minDuration,
5080                                 FRAME_DURATION_30FPS_NSEC));
5081                     if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5082                         if (maxJpeg30fpsSize == null
5083                                 || comparator.compare(maxJpeg30fpsSize, s) < 0) {
5084                             maxJpeg30fpsSize = s;
5085                         }
5086                     }
5087                 }
5088                 assertTrue("No JPEG resolution available at 30fps!", maxJpeg30fpsSize != null);
5089 
5090                 mMaxPrivSizes[MAX_30FPS] = maxPriv30fpsSize;
5091                 mMaxYuvSizes[MAX_30FPS] = maxYuv30fpsSize;
5092                 mMaxY8Sizes[MAX_30FPS] = maxY830fpsSize;
5093                 mMaxJpegSizes[MAX_30FPS] = maxJpeg30fpsSize;
5094             }
5095 
5096             Size[] privInputSizes = configs.getInputSizes(ImageFormat.PRIVATE);
5097             mMaxPrivInputSizes[INPUT_MAXIMUM] = privInputSizes != null
5098                     ? CameraTestUtils.getMaxSize(privInputSizes)
5099                     : null;
5100             Size[] maxResPrivInputSizes =
5101                     sm.isUltraHighResolutionSensor()
5102                     ?  maxResConfigs.getInputSizes(ImageFormat.PRIVATE)
5103                     : null;
5104             mMaxPrivInputSizes[INPUT_MAX_RES] = maxResPrivInputSizes != null
5105                     ? CameraTestUtils.getMaxSize(maxResPrivInputSizes)
5106                     : null;
5107 
5108             Size[] yuvInputSizes = configs.getInputSizes(ImageFormat.YUV_420_888);
5109             mMaxYuvInputSizes[INPUT_MAXIMUM] = yuvInputSizes != null
5110                     ? CameraTestUtils.getMaxSize(yuvInputSizes)
5111                     : null;
5112             Size[] maxResYuvInputSizes = sm.isUltraHighResolutionSensor()
5113                     ?  maxResConfigs.getInputSizes(ImageFormat.YUV_420_888)
5114                     : null;
5115             mMaxYuvInputSizes[INPUT_MAX_RES] = maxResYuvInputSizes != null
5116                     ? CameraTestUtils.getMaxSize(maxResYuvInputSizes)
5117                     : null;
5118 
5119             Size[] y8InputSizes = configs.getInputSizes(ImageFormat.Y8);
5120             mMaxInputY8Size = y8InputSizes != null
5121                     ? CameraTestUtils.getMaxSize(y8InputSizes)
5122                     : null;
5123         }
5124 
5125         public final Size getOutputSizeForFormat(int format, int resolutionIndex) {
5126             if (resolutionIndex >= RESOLUTION_COUNT) {
5127                 return new Size(0, 0);
5128             }
5129 
5130             switch (format) {
5131                 case PRIV:
5132                     return mMaxPrivSizes[resolutionIndex];
5133                 case YUV:
5134                     return mMaxYuvSizes[resolutionIndex];
5135                 case JPEG:
5136                     return mMaxJpegSizes[resolutionIndex];
5137                 case JPEG_R:
5138                     return mMaxJpegRSizes[resolutionIndex];
5139                 case Y8:
5140                     return mMaxY8Sizes[resolutionIndex];
5141                 case HEIC:
5142                     return mMaxHeicSizes[resolutionIndex];
5143                 case RAW:
5144                     if (resolutionIndex == MAX_RES) {
5145                         return mMaxResolutionRawSize;
5146                     }
5147                     return mMaxRawSize;
5148                 default:
5149                     return new Size(0, 0);
5150             }
5151         }
5152 
5153         public final Size getMaxInputSizeForFormat(int format, int resolutionIndex) {
5154             int inputResolutionIndex = getInputResolutionIndex(resolutionIndex);
5155             if (inputResolutionIndex >= INPUT_RESOLUTION_COUNT || inputResolutionIndex == -1) {
5156                 return new Size(0, 0);
5157             }
5158             switch (format) {
5159                 case PRIV:
5160                     return mMaxPrivInputSizes[inputResolutionIndex];
5161                 case YUV:
5162                     return mMaxYuvInputSizes[inputResolutionIndex];
5163                 case Y8:
5164                     return mMaxInputY8Size;
5165                 case RAW:
5166                     return mMaxResolutionRawSize;
5167                 default:
5168                     return new Size(0, 0);
5169             }
5170         }
5171 
5172         public static String combinationToString(int[] combination) {
5173             return combinationToString(combination, /*useCaseSpecified*/ false);
5174         }
5175 
5176         public static String combinationToString(int[] combination, boolean useCaseSpecified) {
5177             StringBuilder b = new StringBuilder("{ ");
5178             int i = 0;
5179             while (i < combination.length) {
5180                 int format = combination[i];
5181                 int sizeLimit = combination[i + 1];
5182 
5183                 appendFormatSize(b, format, sizeLimit);
5184                 if (useCaseSpecified) {
5185                     int streamUseCase = combination[i + 2];
5186                     appendStreamUseCase(b, streamUseCase);
5187                     i += 1;
5188                 }
5189                 i += 2;
5190                 b.append(" ");
5191             }
5192             b.append("}");
5193             return b.toString();
5194         }
5195 
5196         public static String reprocessCombinationToString(int[] reprocessCombination) {
5197             // reprocessCombination[0..1] is the input configuration
5198             StringBuilder b = new StringBuilder("Input: ");
5199             appendFormatSize(b, reprocessCombination[0], reprocessCombination[1]);
5200 
5201             // reprocessCombination[0..1] is also an output combination whose capture serves
5202             // as the reprocess input.
5203             b.append(", Outputs: { ");
5204             for (int i = 0; i < reprocessCombination.length; i += 2) {
5205                 int format = reprocessCombination[i];
5206                 int sizeLimit = reprocessCombination[i + 1];
5207 
5208                 appendFormatSize(b, format, sizeLimit);
5209                 b.append(" ");
5210             }
5211             b.append("}");
5212             return b.toString();
5213         }
5214 
5215         public final int[][] getQueryableCombinations() {
5216             return mQueryableCombinations;
5217         }
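
        /**
         * Illustrative usage sketch (not called by any test): resolves every queryable
         * combination into concrete output sizes via {@link #getOutputSizeForFormat} and
         * logs the result.
         */
        private void exampleLogQueryableCombinations() {
            for (int[] combination : getQueryableCombinations()) {
                StringBuilder b = new StringBuilder(combinationToString(combination));
                b.append(" ->");
                for (int i = 0; i < combination.length; i += 2) {
                    b.append(" ").append(getOutputSizeForFormat(combination[i],
                            combination[i + 1]));
                }
                Log.d(TAG, b.toString());
            }
        }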
5218 
5219         int getInputResolutionIndex(int resolutionIndex) {
5220             switch (resolutionIndex) {
5221                 case MAXIMUM:
5222                     return INPUT_MAXIMUM;
5223                 case MAX_RES:
5224                     return INPUT_MAX_RES;
5225             }
5226             return -1;
5227         }
5228 
5229         private static void appendFormatSize(StringBuilder b, int format, int size) {
5230             switch (format) {
5231                 case PRIV:
5232                     b.append("[PRIV, ");
5233                     break;
5234                 case JPEG:
5235                     b.append("[JPEG, ");
5236                     break;
5237                 case JPEG_R:
5238                     b.append("[JPEG_R, ");
5239                     break;
5240                 case YUV:
5241                     b.append("[YUV, ");
5242                     break;
5243                 case Y8:
5244                     b.append("[Y8, ");
5245                     break;
5246                 case RAW:
5247                     b.append("[RAW, ");
5248                     break;
5249                 default:
5250                     b.append("[UNK, ");
5251                     break;
5252             }
5253 
5254             switch (size) {
5255                 case PREVIEW:
5256                     b.append("PREVIEW]");
5257                     break;
5258                 case RECORD:
5259                     b.append("RECORD]");
5260                     break;
5261                 case MAXIMUM:
5262                     b.append("MAXIMUM]");
5263                     break;
5264                 case VGA:
5265                     b.append("VGA]");
5266                     break;
5267                 case VGA_FULL_FOV:
5268                     b.append("VGA_FULL_FOV]");
5269                     break;
5270                 case MAX_30FPS:
5271                     b.append("MAX_30FPS]");
5272                     break;
5273                 case S720P:
5274                     b.append("S720P]");
5275                     break;
5276                 case S1440P_4_3:
5277                     b.append("S1440P_4_3]");
5278                     break;
5279                 case MAX_RES:
5280                     b.append("MAX_RES]");
5281                     break;
5282                 case S1080P:
5283                     b.append("S1080P]");
5284                     break;
5285                 case S1080P_4_3:
5286                     b.append("S1080P_4_3]");
5287                     break;
5288                 case S1440P_16_9:
5289                     b.append("S1440P_16_9]");
5290                     break;
5291                 case XVGA:
5292                     b.append("XVGA]");
5293                     break;
5294                 case MAXIMUM_16_9:
5295                     b.append("MAXIMUM_16_9]");
5296                     break;
5297                 case MAXIMUM_4_3:
5298                     b.append("MAXIMUM_4_3]");
5299                     break;
5300                 case UHD:
5301                     b.append("UHD]");
5302                     break;
5303                 default:
5304                     b.append("UNK]");
5305                     break;
5306             }
5307         }
5308 
5309         private static void appendStreamUseCase(StringBuilder b, int streamUseCase) {
5310             b.append(", ");
5311             switch (streamUseCase) {
5312                 case USE_CASE_PREVIEW:
5313                     b.append("USE_CASE_PREVIEW");
5314                     break;
5315                 case USE_CASE_PREVIEW_VIDEO_STILL:
5316                     b.append("USE_CASE_PREVIEW_VIDEO_STILL");
5317                     break;
5318                 case USE_CASE_STILL_CAPTURE:
5319                     b.append("USE_CASE_STILL_CAPTURE");
5320                     break;
5321                 case USE_CASE_VIDEO_CALL:
5322                     b.append("USE_CASE_VIDEO_CALL");
5323                     break;
5324                 case USE_CASE_VIDEO_RECORD:
5325                     b.append("USE_CASE_VIDEO_RECORD");
5326                     break;
5327                 case USE_CASE_CROPPED_RAW:
5328                     b.append("USE_CASE_CROPPED_RAW");
5329                     break;
5330                 default:
5331                     b.append("UNK STREAM_USE_CASE");
5332                     break;
5333             }
5334             b.append(";");
5335         }
5336     }
5337 
5338     private static Size getMaxRecordingSize(String cameraId) {
5339         int id = Integer.valueOf(cameraId);
5340 
5341         int quality =
5342                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_2160P)
5343                     ?  CamcorderProfile.QUALITY_2160P :
5344                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_1080P)
5345                     ?  CamcorderProfile.QUALITY_1080P :
5346                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_720P)
5347                     ?  CamcorderProfile.QUALITY_720P :
5348                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_480P)
5349                     ?  CamcorderProfile.QUALITY_480P :
5350                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_QVGA)
5351                     ?  CamcorderProfile.QUALITY_QVGA :
5352                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_CIF)
5353                     ?  CamcorderProfile.QUALITY_CIF :
5354                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_QCIF)
5355                     ?  CamcorderProfile.QUALITY_QCIF :
5356                     -1;
5357 
5358         assertTrue("No recording supported for camera id " + cameraId, quality != -1);
5359 
5360         CamcorderProfile maxProfile = CamcorderProfile.get(id, quality);
5361         return new Size(maxProfile.videoFrameWidth, maxProfile.videoFrameHeight);
5362     }
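
    // Minimal sketch, not used by the tests above: the same "highest available profile"
    // selection as getMaxRecordingSize(), expressed as a loop over a descending quality
    // list. The method name is illustrative; only CamcorderProfile constants already
    // referenced above are assumed.
    private static int pickHighestRecordingQuality(int id) {
        final int[] qualities = {
                CamcorderProfile.QUALITY_2160P, CamcorderProfile.QUALITY_1080P,
                CamcorderProfile.QUALITY_720P, CamcorderProfile.QUALITY_480P,
                CamcorderProfile.QUALITY_QVGA, CamcorderProfile.QUALITY_CIF,
                CamcorderProfile.QUALITY_QCIF };
        for (int quality : qualities) {
            if (CamcorderProfile.hasProfile(id, quality)) {
                return quality; // first match is the highest supported quality
            }
        }
        return -1; // no recording profile available for this camera id
    }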

    private static Size getMaxExternalRecordingSize(
            String cameraId, StreamConfigurationMap config) {
        final Size fullHD = new Size(1920, 1080);

        Size[] videoSizeArr = config.getOutputSizes(android.media.MediaRecorder.class);
        List<Size> sizes = new ArrayList<Size>();
        for (Size sz : videoSizeArr) {
            if (sz.getWidth() <= fullHD.getWidth() && sz.getHeight() <= fullHD.getHeight()) {
                sizes.add(sz);
            }
        }
        List<Size> videoSizes = getAscendingOrderSizes(sizes, /*ascending*/false);
        for (Size sz : videoSizes) {
            long minFrameDuration = config.getOutputMinFrameDuration(
                    android.media.MediaRecorder.class, sz);
            // Give some margin for rounding error
            if (minFrameDuration < (1e9 / 29.9)) {
                Log.i(TAG, "External camera " + cameraId + " has max video size:" + sz);
                return sz;
            }
        }
        fail("Camera " + cameraId + " does not support any 30fps video output");
        return fullHD; // doesn't matter what size is returned here
    }
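
    // Minimal sketch, not used above: the 30fps check in getMaxExternalRecordingSize()
    // compares a minimum frame duration (in nanoseconds) against 1e9 / 29.9. Expressed
    // directly, the maximum achievable frame rate for a reported minimum frame duration
    // is its reciprocal. The method name is illustrative only.
    private static double maxFpsForMinFrameDuration(long minFrameDurationNs) {
        if (minFrameDurationNs <= 0) {
            return Double.POSITIVE_INFINITY; // no frame duration constraint reported
        }
        return 1e9 / minFrameDurationNs; // e.g. 33_333_333 ns -> ~30 fps
    }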

    private static Size getMaxPreviewSize(Context context, String cameraId) {
        try {
            WindowManager windowManager = context.getSystemService(WindowManager.class);
            assertNotNull("Could not find WindowManager service.", windowManager);

            WindowMetrics windowMetrics = windowManager.getCurrentWindowMetrics();
            Rect windowBounds = windowMetrics.getBounds();

            // Treat the window bounds as landscape (width >= height) before comparing them
            // against the landscape-oriented preview sizes.
            int width = windowBounds.width();
            int height = windowBounds.height();

            if (height > width) {
                height = width;
                width = windowBounds.height();
            }

            CameraManager camMgr = context.getSystemService(CameraManager.class);
            List<Size> orderedPreviewSizes = CameraTestUtils.getSupportedPreviewSizes(
                    cameraId, camMgr, PREVIEW_SIZE_BOUND);

            if (orderedPreviewSizes != null) {
                for (Size size : orderedPreviewSizes) {
                    if (width >= size.getWidth()
                            && height >= size.getHeight()) {
                        return size;
                    }
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "getMaxPreviewSize failed.", e);
        }
        return PREVIEW_SIZE_BOUND;
    }
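
    // Hedged usage sketch, not part of the test flow above (the SurfaceTexture is assumed to be
    // provided by the caller): a bound returned by getMaxPreviewSize(...) would typically be
    // applied to a SurfaceTexture before wrapping it in a Surface for a preview output.
    private static Surface createPreviewSurfaceSketch(SurfaceTexture texture, Size previewSize) {
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        return new Surface(texture);
    }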

    /**
     * Use the external camera feature flag to check whether an external camera is supported.
     * If it is, iterate through the camera ids under test to verify that an external camera
     * is actually connected.
     *
     * @param cameraIds list of camera ids under test
     * @param packageManager package manager instance for checking the feature flag
     * @param cameraManager camera manager for getting camera characteristics
     */
    public static void verifyExternalCameraConnected(String[] cameraIds,
            PackageManager packageManager, CameraManager cameraManager) throws Exception {
        if (packageManager.hasSystemFeature(PackageManager.FEATURE_CAMERA_EXTERNAL)) {
            boolean externalCameraConnected = false;
            for (int i = 0; i < cameraIds.length; i++) {
                CameraCharacteristics props =
                        cameraManager.getCameraCharacteristics(cameraIds[i]);
                assertNotNull("Can't get camera characteristics for camera "
                        + cameraIds[i], props);
                Integer lensFacing = props.get(CameraCharacteristics.LENS_FACING);
                assertNotNull("Can't get lens facing info", lensFacing);
                if (lensFacing == CameraCharacteristics.LENS_FACING_EXTERNAL) {
                    externalCameraConnected = true;
                }
            }
            assertTrue("External camera is not connected on device with FEATURE_CAMERA_EXTERNAL",
                    externalCameraConnected);
        }
    }
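
    // Minimal companion sketch (not required by verifyExternalCameraConnected above): collects
    // the ids of all LENS_FACING_EXTERNAL cameras instead of only asserting that one exists.
    // The method name is illustrative only.
    private static List<String> getExternalCameraIds(String[] cameraIds,
            CameraManager cameraManager) throws CameraAccessException {
        List<String> externalIds = new ArrayList<String>();
        for (String id : cameraIds) {
            CameraCharacteristics props = cameraManager.getCameraCharacteristics(id);
            Integer lensFacing = props.get(CameraCharacteristics.LENS_FACING);
            if (lensFacing != null && lensFacing == CameraCharacteristics.LENS_FACING_EXTERNAL) {
                externalIds.add(id);
            }
        }
        return externalIds;
    }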

    /**
     * Verifies the presence (or absence) of the given keys in the supportedKeys set.
     *
     * @param keys array of keys to be checked
     * @param supportedKeys set used to verify the presence of the keys
     * @param expectedResult true if the keys should be present, false if not
     */
    public static <T> void checkKeysAreSupported(T[] keys, Set<T> supportedKeys,
            boolean expectedResult) {
        String errorMsg = expectedResult ? " key should be present"
                : " key should not be present";
        for (T currKey : keys) {
            assertTrue(currKey + errorMsg
                    + " among the supported keys!",
                    supportedKeys.contains(currKey) == expectedResult);
        }
    }

    /**
     * Verifies the presence (or absence) of each array of keys in the supportedKeys set.
     *
     * @param keys list of key arrays to be checked
     * @param supportedKeys set used to verify the presence of the keys
     * @param expectedResult true if the keys should be present, false if not
     */
    public static <T> void checkKeysAreSupported(List<T[]> keys, Set<T> supportedKeys,
            boolean expectedResult) {
        for (T[] k : keys) {
            checkKeysAreSupported(k, supportedKeys, expectedResult);
        }
    }
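
    // Hedged usage sketch: how a test might use checkKeysAreSupported(...) to assert that a
    // camera's session keys include a couple of request keys. The key choice and the source of
    // supportedSessionKeys are hypothetical, for illustration only.
    private static void exampleCheckSessionKeys(Set<CaptureRequest.Key<?>> supportedSessionKeys) {
        CaptureRequest.Key<?>[] requiredKeys = {
                CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AF_MODE };
        checkKeysAreSupported(requiredKeys, supportedSessionKeys, /*expectedResult*/ true);
    }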

    /**
     * Check whether the camera device keeps video stabilization off.
     *
     * @param request The capture request to check
     * @return true if video stabilization is OFF or not set
     */
    public static boolean isStabilizationOff(CaptureRequest request) {
        Integer stabilizationMode = request.get(
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE);

        return (stabilizationMode == null
                || stabilizationMode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    }
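
    // Minimal companion sketch, assuming only public camera2 constants: the analogous check for
    // optical (lens) stabilization, mirroring isStabilizationOff() above. Not used by any test
    // in this file; the method name is illustrative only.
    private static boolean isOpticalStabilizationOff(CaptureRequest request) {
        Integer oisMode = request.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE);
        return (oisMode == null
                || oisMode == CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF);
    }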
}