1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.params;
18 
19 import static com.android.internal.util.Preconditions.checkArrayElementsNotNull;
20 
21 import android.graphics.ImageFormat;
22 import android.graphics.PixelFormat;
23 import android.hardware.camera2.CameraCharacteristics;
24 import android.hardware.camera2.CameraDevice;
25 import android.hardware.camera2.CameraMetadata;
26 import android.hardware.camera2.CaptureRequest;
27 import android.hardware.camera2.legacy.LegacyCameraDevice;
28 import android.hardware.camera2.utils.HashCodeHelpers;
29 import android.hardware.camera2.utils.SurfaceUtils;
30 import android.util.Range;
31 import android.util.Size;
32 import android.util.SparseIntArray;
33 import android.view.Surface;
34 
35 import java.util.Arrays;
36 import java.util.HashMap;
37 import java.util.Objects;
38 import java.util.Set;
39 
40 /**
41  * Immutable class to store the available stream
42  * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
43  * {@link android.view.Surface Surfaces} for creating a
44  * {@link android.hardware.camera2.CameraCaptureSession capture session} with
45  * {@link android.hardware.camera2.CameraDevice#createCaptureSession}.
46  * <!-- TODO: link to input stream configuration -->
47  *
48  * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
49  * for that format) that are supported by a camera device.</p>
50  *
51  * <p>This also contains the minimum frame durations and stall durations for each format/size
52  * combination that can be used to calculate effective frame rate when submitting multiple captures.
53  * </p>
54  *
55  * <p>An instance of this object is available from {@link CameraCharacteristics} using
56  * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
57  * {@link CameraCharacteristics#get} method.</p>
58  *
59  * <pre><code>{@code
60  * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
61  * StreamConfigurationMap configs = characteristics.get(
62  *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
63  * }</code></pre>
64  *
65  * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
66  * @see CameraDevice#createCaptureSession
67  */
68 public final class StreamConfigurationMap {
69 
70     private static final String TAG = "StreamConfigurationMap";
71 
72     /**
73      * Create a new {@link StreamConfigurationMap}.
74      *
75      * <p>The array parameters ownership is passed to this object after creation; do not
76      * write to them after this constructor is invoked.</p>
77      *
78      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
79      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
80      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
81      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
82      * @param depthMinFrameDurations a non-{@code null} array of depth
83      *        {@link StreamConfigurationDuration}
84      * @param depthStallDurations a non-{@code null} array of depth
85      *        {@link StreamConfigurationDuration}
86      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
87      *        {@link StreamConfiguration}
88      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
89      *        {@link StreamConfigurationDuration}
90      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
91      *        {@link StreamConfigurationDuration}
92      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
93      * @param heicMinFrameDurations a non-{@code null} array of heic
94      *        {@link StreamConfigurationDuration}
95      * @param heicStallDurations a non-{@code null} array of heic
96      *        {@link StreamConfigurationDuration}
97      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
98      *        camera device does not support high speed video recording
99      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
100      *        and thus needs a separate list of slow high-resolution output sizes
101      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
102      *         were {@code null} or any subelements were {@code null}
103      *
104      * @hide
105      */
    public StreamConfigurationMap(
            StreamConfiguration[] configurations,
            StreamConfigurationDuration[] minFrameDurations,
            StreamConfigurationDuration[] stallDurations,
            StreamConfiguration[] depthConfigurations,
            StreamConfigurationDuration[] depthMinFrameDurations,
            StreamConfigurationDuration[] depthStallDurations,
            StreamConfiguration[] dynamicDepthConfigurations,
            StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
            StreamConfigurationDuration[] dynamicDepthStallDurations,
            StreamConfiguration[] heicConfigurations,
            StreamConfigurationDuration[] heicMinFrameDurations,
            StreamConfigurationDuration[] heicStallDurations,
            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
            ReprocessFormatsMap inputOutputFormatsMap,
            boolean listHighResolution) {
        // Delegate to the full constructor with enforceImplementationDefined=true:
        // callers of this overload require an IMPLEMENTATION_DEFINED output
        // configuration to be present when color configurations exist.
        this(configurations, minFrameDurations, stallDurations,
                    depthConfigurations, depthMinFrameDurations, depthStallDurations,
                    dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
                    dynamicDepthStallDurations,
                    heicConfigurations, heicMinFrameDurations, heicStallDurations,
                    highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution,
                    /*enforceImplementationDefined*/ true);
    }
130 
131     /**
132      * Create a new {@link StreamConfigurationMap}.
133      *
134      * <p>The array parameters ownership is passed to this object after creation; do not
135      * write to them after this constructor is invoked.</p>
136      *
137      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
138      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
139      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
140      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
141      * @param depthMinFrameDurations a non-{@code null} array of depth
142      *        {@link StreamConfigurationDuration}
143      * @param depthStallDurations a non-{@code null} array of depth
144      *        {@link StreamConfigurationDuration}
145      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
146      *        {@link StreamConfiguration}
147      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
148      *        {@link StreamConfigurationDuration}
149      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
150      *        {@link StreamConfigurationDuration}
151      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
152      * @param heicMinFrameDurations a non-{@code null} array of heic
153      *        {@link StreamConfigurationDuration}
154      * @param heicStallDurations a non-{@code null} array of heic
155      *        {@link StreamConfigurationDuration}
156      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
157      *        camera device does not support high speed video recording
158      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
159      *        and thus needs a separate list of slow high-resolution output sizes
160      * @param enforceImplementationDefined a flag indicating whether
161      *        IMPLEMENTATION_DEFINED format configuration must be present
162      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
163      *         were {@code null} or any subelements were {@code null}
164      *
165      * @hide
166      */
    public StreamConfigurationMap(
            StreamConfiguration[] configurations,
            StreamConfigurationDuration[] minFrameDurations,
            StreamConfigurationDuration[] stallDurations,
            StreamConfiguration[] depthConfigurations,
            StreamConfigurationDuration[] depthMinFrameDurations,
            StreamConfigurationDuration[] depthStallDurations,
            StreamConfiguration[] dynamicDepthConfigurations,
            StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
            StreamConfigurationDuration[] dynamicDepthStallDurations,
            StreamConfiguration[] heicConfigurations,
            StreamConfigurationDuration[] heicMinFrameDurations,
            StreamConfigurationDuration[] heicStallDurations,
            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
            ReprocessFormatsMap inputOutputFormatsMap,
            boolean listHighResolution,
            boolean enforceImplementationDefined) {

        // At least one configuration family (color, depth, or heic) must be present.
        if (configurations == null &&
                depthConfigurations == null &&
                heicConfigurations == null) {
            throw new NullPointerException("At least one of color/depth/heic configurations " +
                    "must not be null");
        }

        if (configurations == null) {
            // If no color configurations exist, ensure depth ones do
            mConfigurations = new StreamConfiguration[0];
            mMinFrameDurations = new StreamConfigurationDuration[0];
            mStallDurations = new StreamConfigurationDuration[0];
        } else {
            // Color configurations exist; all three color arrays must be element-wise non-null.
            mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
            mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
            mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
        }

        mListHighResolution = listHighResolution;

        // For each optional family, substitute empty arrays when absent so the
        // iteration loops below need no null checks.
        if (depthConfigurations == null) {
            mDepthConfigurations = new StreamConfiguration[0];
            mDepthMinFrameDurations = new StreamConfigurationDuration[0];
            mDepthStallDurations = new StreamConfigurationDuration[0];
        } else {
            mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
                    "depthConfigurations");
            mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
                    "depthMinFrameDurations");
            mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
                    "depthStallDurations");
        }

        if (dynamicDepthConfigurations == null) {
            mDynamicDepthConfigurations = new StreamConfiguration[0];
            mDynamicDepthMinFrameDurations = new StreamConfigurationDuration[0];
            mDynamicDepthStallDurations = new StreamConfigurationDuration[0];
        } else {
            mDynamicDepthConfigurations = checkArrayElementsNotNull(dynamicDepthConfigurations,
                    "dynamicDepthConfigurations");
            mDynamicDepthMinFrameDurations = checkArrayElementsNotNull(
                    dynamicDepthMinFrameDurations, "dynamicDepthMinFrameDurations");
            mDynamicDepthStallDurations = checkArrayElementsNotNull(dynamicDepthStallDurations,
                    "dynamicDepthStallDurations");
        }

        if (heicConfigurations == null) {
            mHeicConfigurations = new StreamConfiguration[0];
            mHeicMinFrameDurations = new StreamConfigurationDuration[0];
            mHeicStallDurations = new StreamConfigurationDuration[0];
        } else {
            mHeicConfigurations = checkArrayElementsNotNull(heicConfigurations,
                    "heicConfigurations");
            mHeicMinFrameDurations = checkArrayElementsNotNull(heicMinFrameDurations,
                    "heicMinFrameDurations");
            mHeicStallDurations = checkArrayElementsNotNull(heicStallDurations,
                    "heicStallDurations");
        }

        if (highSpeedVideoConfigurations == null) {
            mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
        } else {
            mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
                    highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
        }

        // For each format, track how many sizes there are available to configure
        for (StreamConfiguration config : mConfigurations) {
            int fmt = config.getFormat();
            SparseIntArray map = null;
            if (config.isOutput()) {
                mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
                long duration = 0;
                if (mListHighResolution) {
                    // BURST_CAPTURE devices split sizes by speed: find the min frame
                    // duration for this exact format/size to classify it below.
                    for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
                        if (configurationDuration.getFormat() == fmt &&
                                configurationDuration.getWidth() == config.getSize().getWidth() &&
                                configurationDuration.getHeight() == config.getSize().getHeight()) {
                            duration = configurationDuration.getDuration();
                            break;
                        }
                    }
                }
                // Outputs sustaining at least 20fps count as regular outputs; slower
                // ones are tracked separately as (slow) high-resolution outputs.
                map = duration <= DURATION_20FPS_NS ?
                        mOutputFormats : mHighResOutputFormats;
            } else {
                map = mInputFormats;
            }
            map.put(fmt, map.get(fmt) + 1);
        }

        // For each depth format, track how many sizes there are available to configure
        for (StreamConfiguration config : mDepthConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input depth configs
                continue;
            }

            mDepthOutputFormats.put(config.getFormat(),
                    mDepthOutputFormats.get(config.getFormat()) + 1);
        }
        for (StreamConfiguration config : mDynamicDepthConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input configs
                continue;
            }

            mDynamicDepthOutputFormats.put(config.getFormat(),
                    mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
        }

        // For each heic format, track how many sizes there are available to configure
        for (StreamConfiguration config : mHeicConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input heic configs
                continue;
            }

            mHeicOutputFormats.put(config.getFormat(),
                    mHeicOutputFormats.get(config.getFormat()) + 1);
        }

        // When color configurations are present, the HAL is normally required to
        // advertise an IMPLEMENTATION_DEFINED output; callers may waive this via
        // enforceImplementationDefined=false.
        if (configurations != null && enforceImplementationDefined &&
                mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
            throw new AssertionError(
                    "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
        }

        // For each Size/FPS range, track how many FPS range/Size there are available
        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
            Size size = config.getSize();
            Range<Integer> fpsRange = config.getFpsRange();
            Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
            if (fpsRangeCount == null) {
                fpsRangeCount = 0;
            }
            mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
            Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
            if (sizeCount == null) {
                sizeCount = 0;
            }
            mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
        }

        // May be null when the camera device does not support reprocessing.
        mInputOutputFormatsMap = inputOutputFormatsMap;
    }
331 
332     /**
333      * Get the image {@code format} output formats in this stream configuration.
334      *
335      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
336      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
337      *
338      * <p>Formats listed in this array are guaranteed to return true if queried with
339      * {@link #isOutputSupportedFor(int)}.</p>
340      *
341      * @return an array of integer format
342      *
343      * @see ImageFormat
344      * @see PixelFormat
345      */
    public int[] getOutputFormats() {
        // Translate internal HAL output formats into their public equivalents.
        return getPublicFormats(/*output*/true);
    }
349 
350     /**
351      * Get the image {@code format} output formats for a reprocessing input format.
352      *
353      * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
354      * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
355      * listed in the return value of this method. Including any other output Surface as a target
356      * will throw an IllegalArgumentException. If no output format is supported given the input
357      * format, an empty int[] will be returned.</p>
358      *
359      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
360      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
361      *
362      * <p>Formats listed in this array are guaranteed to return true if queried with
363      * {@link #isOutputSupportedFor(int)}.</p>
364      *
365      * @return an array of integer format
366      *
367      * @see ImageFormat
368      * @see PixelFormat
369      */
getValidOutputFormatsForInput(int inputFormat)370     public int[] getValidOutputFormatsForInput(int inputFormat) {
371         if (mInputOutputFormatsMap == null) {
372             return new int[0];
373         }
374 
375         int[] outputs = mInputOutputFormatsMap.getOutputs(inputFormat);
376         if (mHeicOutputFormats.size() > 0) {
377             // All reprocessing formats map contain JPEG.
378             int[] outputsWithHeic = Arrays.copyOf(outputs, outputs.length+1);
379             outputsWithHeic[outputs.length] = ImageFormat.HEIC;
380             return outputsWithHeic;
381         } else {
382             return outputs;
383         }
384     }
385 
386     /**
387      * Get the image {@code format} input formats in this stream configuration.
388      *
389      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
390      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
391      *
392      * @return an array of integer format
393      *
394      * @see ImageFormat
395      * @see PixelFormat
396      */
    public int[] getInputFormats() {
        // Translate internal HAL input formats into their public equivalents.
        return getPublicFormats(/*output*/false);
    }
400 
401     /**
402      * Get the supported input sizes for this input format.
403      *
404      * <p>The format must have come from {@link #getInputFormats}; otherwise
405      * {@code null} is returned.</p>
406      *
407      * @param format a format from {@link #getInputFormats}
408      * @return a non-empty array of sizes, or {@code null} if the format was not available.
409      */
    public Size[] getInputSizes(final int format) {
        // Look up sizes in the input direction; highRes is false here — the
        // high-resolution split appears to apply only to outputs (TODO confirm).
        return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
    }
413 
414     /**
415      * Determine whether or not output surfaces with a particular user-defined format can be passed
416      * {@link CameraDevice#createCaptureSession createCaptureSession}.
417      *
418      * <p>This method determines that the output {@code format} is supported by the camera device;
419      * each output {@code surface} target may or may not itself support that {@code format}.
420      * Refer to the class which provides the surface for additional documentation.</p>
421      *
422      * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
423      * returned by {@link #getOutputSizes}.</p>
424      *
425      * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
426      * @return
427      *          {@code true} iff using a {@code surface} with this {@code format} will be
428      *          supported with {@link CameraDevice#createCaptureSession}
429      *
430      * @throws IllegalArgumentException
431      *          if the image format was not a defined named constant
432      *          from either {@link ImageFormat} or {@link PixelFormat}
433      *
434      * @see ImageFormat
435      * @see PixelFormat
436      * @see CameraDevice#createCaptureSession
437      */
isOutputSupportedFor(int format)438     public boolean isOutputSupportedFor(int format) {
439         checkArgumentFormat(format);
440 
441         int internalFormat = imageFormatToInternal(format);
442         int dataspace = imageFormatToDataspace(format);
443         if (dataspace == HAL_DATASPACE_DEPTH) {
444             return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
445         } else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
446             return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
447         } else if (dataspace == HAL_DATASPACE_HEIF) {
448             return mHeicOutputFormats.indexOfKey(internalFormat) >= 0;
449         } else {
450             return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
451         }
452     }
453 
454     /**
455      * Determine whether or not output streams can be configured with a particular class
456      * as a consumer.
457      *
458      * <p>The following list is generally usable for outputs:
459      * <ul>
460      * <li>{@link android.media.ImageReader} -
461      * Recommended for image processing or streaming to external resources (such as a file or
462      * network)
463      * <li>{@link android.media.MediaRecorder} -
464      * Recommended for recording video (simple to use)
465      * <li>{@link android.media.MediaCodec} -
466      * Recommended for recording video (more complicated to use, with more flexibility)
467      * <li>{@link android.renderscript.Allocation} -
468      * Recommended for image processing with {@link android.renderscript RenderScript}
469      * <li>{@link android.view.SurfaceHolder} -
470      * Recommended for low-power camera preview with {@link android.view.SurfaceView}
471      * <li>{@link android.graphics.SurfaceTexture} -
472      * Recommended for OpenGL-accelerated preview processing or compositing with
473      * {@link android.view.TextureView}
474      * </ul>
475      * </p>
476      *
477      * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
478      * provide a producer endpoint that is suitable to be used with
479      * {@link CameraDevice#createCaptureSession}.</p>
480      *
481      * <p>Since not all of the above classes support output of all format and size combinations,
482      * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
483      *
484      * @param klass a non-{@code null} {@link Class} object reference
485      * @return {@code true} if this class is supported as an output, {@code false} otherwise
486      *
487      * @throws NullPointerException if {@code klass} was {@code null}
488      *
489      * @see CameraDevice#createCaptureSession
490      * @see #isOutputSupportedFor(Surface)
491      */
isOutputSupportedFor(Class<T> klass)492     public static <T> boolean isOutputSupportedFor(Class<T> klass) {
493         Objects.requireNonNull(klass, "klass must not be null");
494 
495         if (klass == android.media.ImageReader.class) {
496             return true;
497         } else if (klass == android.media.MediaRecorder.class) {
498             return true;
499         } else if (klass == android.media.MediaCodec.class) {
500             return true;
501         } else if (klass == android.renderscript.Allocation.class) {
502             return true;
503         } else if (klass == android.view.SurfaceHolder.class) {
504             return true;
505         } else if (klass == android.graphics.SurfaceTexture.class) {
506             return true;
507         }
508 
509         return false;
510     }
511 
512     /**
513      * Determine whether or not the {@code surface} in its current state is suitable to be included
514      * in a {@link CameraDevice#createCaptureSession capture session} as an output.
515      *
516      * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
517      * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
518      * compatible with the {@link CameraDevice} in general
 519      * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
520      * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
521      *
522      * <p>Reasons for a {@code surface} being specifically incompatible might be:
523      * <ul>
524      * <li>Using a format that's not listed by {@link #getOutputFormats}
525      * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
 526      * <li>The {@code surface} itself is not in a state where it can service a new producer.
 527      * </li>
 528      * </ul>
529      *
530      * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
531      * not match a camera-supported size, as long as the format (or class) is supported and the
 532      * camera device supports a size that is equal to or less than 1080p in that format. If such a
 533      * Surface is used to create a capture session, it will have its size rounded to the nearest
534      * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
535      * and ImageReader.</p>
536      *
537      * <p>This is not an exhaustive list; see the particular class's documentation for further
538      * possible reasons of incompatibility.</p>
539      *
540      * @param surface a non-{@code null} {@link Surface} object reference
541      * @return {@code true} if this is supported, {@code false} otherwise
542      *
543      * @throws NullPointerException if {@code surface} was {@code null}
544      * @throws IllegalArgumentException if the Surface endpoint is no longer valid
545      *
546      * @see CameraDevice#createCaptureSession
547      * @see #isOutputSupportedFor(Class)
548      */
isOutputSupportedFor(Surface surface)549     public boolean isOutputSupportedFor(Surface surface) {
550         Objects.requireNonNull(surface, "surface must not be null");
551 
552         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
553         int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
554         int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
555 
556         // See if consumer is flexible.
557         boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
558 
559         StreamConfiguration[] configs =
560                 surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
561                 surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
562                 surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
563                 mConfigurations;
564         for (StreamConfiguration config : configs) {
565             if (config.getFormat() == surfaceFormat && config.isOutput()) {
566                 // Matching format, either need exact size match, or a flexible consumer
567                 // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
568                 if (config.getSize().equals(surfaceSize)) {
569                     return true;
570                 } else if (isFlexible &&
571                         (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
572                     return true;
573                 }
574             }
575         }
576         return false;
577     }
578 
579     /**
580      * Determine whether or not the particular stream configuration is suitable to be included
581      * in a {@link CameraDevice#createCaptureSession capture session} as an output.
582      *
583      * @param size stream configuration size
584      * @param format stream configuration format
585      * @return {@code true} if this is supported, {@code false} otherwise
586      *
587      * @see CameraDevice#createCaptureSession
588      * @see #isOutputSupportedFor(Class)
589      * @hide
590      */
isOutputSupportedFor(Size size, int format)591     public boolean isOutputSupportedFor(Size size, int format) {
592         int internalFormat = imageFormatToInternal(format);
593         int dataspace = imageFormatToDataspace(format);
594 
595         StreamConfiguration[] configs =
596                 dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
597                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
598                 dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
599                 mConfigurations;
600         for (StreamConfiguration config : configs) {
601             if ((config.getFormat() == internalFormat) && config.isOutput() &&
602                     config.getSize().equals(size)) {
603                 return true;
604             }
605         }
606 
607         return false;
608     }
609 
610     /**
611      * Get a list of sizes compatible with {@code klass} to use as an output.
612      *
613      * <p>Some of the supported classes may support additional formats beyond
614      * {@link ImageFormat#PRIVATE}; this function only returns
615      * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
616      * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
617      * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
618      * class.</p>
619      *
620      * <p>If a well-defined format such as {@code NV21} is required, use
621      * {@link #getOutputSizes(int)} instead.</p>
622      *
623      * <p>The {@code klass} should be a supported output, that querying
624      * {@code #isOutputSupportedFor(Class)} should return {@code true}.</p>
625      *
626      * @param klass
627      *          a non-{@code null} {@link Class} object reference
628      * @return
629      *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
630      *          or {@code null} iff the {@code klass} is not a supported output.
631      *
632      *
633      * @throws NullPointerException if {@code klass} was {@code null}
634      *
635      * @see #isOutputSupportedFor(Class)
636      */
getOutputSizes(Class<T> klass)637     public <T> Size[] getOutputSizes(Class<T> klass) {
638         if (isOutputSupportedFor(klass) == false) {
639             return null;
640         }
641 
642         return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
643                 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
644     }
645 
646     /**
647      * Get a list of sizes compatible with the requested image {@code format}.
648      *
649      * <p>The {@code format} should be a supported format (one of the formats returned by
650      * {@link #getOutputFormats}).</p>
651      *
652      * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
653      * that support the
654      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
655      * capability to get a list of high-resolution output sizes that cannot operate at the preferred
656      * 20fps rate. This means that for some supported formats, this method will return an empty
657      * list, if all the supported resolutions operate at below 20fps.  For devices that do not
658      * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
659      *
660      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
661      * @return
662      *          an array of supported sizes,
663      *          or {@code null} if the {@code format} is not a supported output
664      *
665      * @see ImageFormat
666      * @see PixelFormat
667      * @see #getOutputFormats
668      */
getOutputSizes(int format)669     public Size[] getOutputSizes(int format) {
670         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
671     }
672 
673     /**
674      * Get a list of supported high speed video recording sizes.
675      * <p>
676      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
677      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
678      * list the supported high speed video size configurations. All the sizes listed will be a
679      * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
680      * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
681      * </p>
682      * <p>
     * To enable high speed video recording, application must create a constrained high speed
     * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
685      * a CaptureRequest list created by
686      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
687      * to this session. The application must select the video size from this method and
688      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
689      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
690      * generate the high speed request list. For example, if the application intends to do high
691      * speed recording, it can select the maximum size reported by this method to create high speed
692      * capture session. Note that for the use case of multiple output streams, application must
693      * select one unique size from this method to use (e.g., preview and recording streams must have
694      * the same size). Otherwise, the high speed session creation will fail. Once the size is
695      * selected, application can get the supported FPS ranges by
696      * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
697      * request lists via
698      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
699      * </p>
700      *
701      * @return an array of supported high speed video recording sizes
702      * @see #getHighSpeedVideoFpsRangesFor(Size)
703      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
704      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
705      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
706      */
getHighSpeedVideoSizes()707     public Size[] getHighSpeedVideoSizes() {
708         Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
709         return keySet.toArray(new Size[keySet.size()]);
710     }
711 
712     /**
713      * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
714      * <p>
715      * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
716      * </p>
717      * <p>
718      * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
719      * must not be used to setup capture requests that are submitted to unconstrained capture
720      * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
721      * </p>
722      * <p>
723      * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
724      * </p>
725      *
726      * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
     * @return an array of supported high speed video recording FPS ranges. The upper bound of
     *         returned ranges is guaranteed to be greater than or equal to 120.
729      * @throws IllegalArgumentException if input size does not exist in the return value of
730      *             getHighSpeedVideoSizes
731      * @see #getHighSpeedVideoSizes()
732      * @see #getHighSpeedVideoFpsRanges()
733      */
getHighSpeedVideoFpsRangesFor(Size size)734     public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
735         Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
736         if (fpsRangeCount == null || fpsRangeCount == 0) {
737             throw new IllegalArgumentException(String.format(
738                     "Size %s does not support high speed video recording", size));
739         }
740 
741         @SuppressWarnings("unchecked")
742         Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
743         int i = 0;
744         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
745             if (size.equals(config.getSize())) {
746                 fpsRanges[i++] = config.getFpsRange();
747             }
748         }
749         return fpsRanges;
750     }
751 
752     /**
753      * Get a list of supported high speed video recording FPS ranges.
754      * <p>
755      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
756      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
757      * list the supported high speed video FPS range configurations. Application can then use
758      * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
759      * </p>
760      * <p>
     * To enable high speed video recording, application must create a constrained high speed
     * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
763      * a CaptureRequest list created by
764      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
765      * to this session. The application must select the video size from this method and
766      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
767      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
768      * generate the high speed request list. For example, if the application intends to do high
769      * speed recording, it can select one FPS range reported by this method, query the video sizes
770      * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
771      * sizes to create a high speed capture session. Note that for the use case of multiple output
772      * streams, application must select one unique size from this method to use (e.g., preview and
773      * recording streams must have the same size). Otherwise, the high speed session creation will
774      * fail. Once the high speed capture session is created, the application can set the FPS range
775      * in the recording request lists via
776      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
777      * </p>
778      * <p>
779      * The FPS ranges reported by this method will have below characteristics:
     * <li>The fpsMin and fpsMax will be a multiple of 30fps.</li>
781      * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
782      * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
783      * <li>For each fixed FPS range, there will be one corresponding variable FPS range [30,
784      * fps_max]. These kinds of FPS ranges are suitable for preview-only use cases where the
785      * application doesn't want the camera device always produce higher frame rate than the display
786      * refresh rate.</li>
787      * </p>
788      *
     * @return an array of supported high speed video recording FPS ranges. The upper bound of
     *         returned ranges is guaranteed to be larger or equal to 120.
791      * @see #getHighSpeedVideoSizesFor
792      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
793      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
794      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
795      */
796     @SuppressWarnings("unchecked")
getHighSpeedVideoFpsRanges()797     public Range<Integer>[] getHighSpeedVideoFpsRanges() {
798         Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
799         return keySet.toArray(new Range[keySet.size()]);
800     }
801 
802     /**
803      * Get the supported video sizes for an input high speed FPS range.
804      *
805      * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
806      *
807      * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
808      * @return An array of video sizes to create high speed capture sessions for high speed streaming
809      *         use cases.
810      *
811      * @throws IllegalArgumentException if input FPS range does not exist in the return value of
812      *         getHighSpeedVideoFpsRanges
813      * @see #getHighSpeedVideoFpsRanges()
814      */
getHighSpeedVideoSizesFor(Range<Integer> fpsRange)815     public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
816         Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
817         if (sizeCount == null || sizeCount == 0) {
818             throw new IllegalArgumentException(String.format(
819                     "FpsRange %s does not support high speed video recording", fpsRange));
820         }
821 
822         Size[] sizes = new Size[sizeCount];
823         int i = 0;
824         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
825             if (fpsRange.equals(config.getFpsRange())) {
826                 sizes[i++] = config.getSize();
827             }
828         }
829         return sizes;
830     }
831 
832     /**
833      * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
834      * rate.
835      *
836      * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
837      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
838      * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
839      * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
840      * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list that
841      * are less than 24 megapixels are still guaranteed to operate at a rate of at least 10 fps,
842      * not including stall duration. Sizes on this list that are at least 24 megapixels are allowed
843      * to operate at less than 10 fps.</p>
844      *
845      * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
846      * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
847      * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
848      * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
849      * fps requirement.</p>
850      *
851      * @return an array of supported slower high-resolution sizes, or {@code null} if the
852      *         BURST_CAPTURE capability is not supported
853      */
getHighResolutionOutputSizes(int format)854     public Size[] getHighResolutionOutputSizes(int format) {
855         if (!mListHighResolution) return null;
856 
857         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
858     }
859 
860     /**
861      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
862      * for the format/size combination (in nanoseconds).
863      *
864      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
865      * <p>{@code size} should be one of the ones returned by
866      * {@link #getOutputSizes(int)}.</p>
867      *
868      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
869      * that stream is configured in a session, with all processing (typically in
870      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
871      *
872      * <p>When multiple streams are used in a session, the minimum frame duration will be
873      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
874      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
875      * capture request.</p>
876      *
877      * <p>For devices that do not support manual sensor control
878      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
879      * this function may return 0.</p>
880      *
881      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
882      * regardless of whether the stream is input or output.</p>
883      *
884      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
885      * @param size an output-compatible size
886      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
887      *          0 if the minimum frame duration is not available.
888      *
889      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
890      * @throws NullPointerException if {@code size} was {@code null}
891      *
892      * @see CaptureRequest#SENSOR_FRAME_DURATION
893      * @see #getOutputStallDuration(int, Size)
894      * @see ImageFormat
895      * @see PixelFormat
896      */
getOutputMinFrameDuration(int format, Size size)897     public long getOutputMinFrameDuration(int format, Size size) {
898         Objects.requireNonNull(size, "size must not be null");
899         checkArgumentFormatSupported(format, /*output*/true);
900 
901         return getInternalFormatDuration(imageFormatToInternal(format),
902                 imageFormatToDataspace(format),
903                 size,
904                 DURATION_MIN_FRAME);
905     }
906 
907     /**
908      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
909      * for the class/size combination (in nanoseconds).
910      *
911      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
912      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
913      *
914      * <p>{@code klass} should be one of the ones which is supported by
915      * {@link #isOutputSupportedFor(Class)}.</p>
916      *
917      * <p>{@code size} should be one of the ones returned by
918      * {@link #getOutputSizes(int)}.</p>
919      *
920      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
921      * that stream is configured in a session, with all processing (typically in
922      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
923      *
924      * <p>When multiple streams are used in a session, the minimum frame duration will be
925      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
926      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
927      * capture request.</p>
928      *
929      * <p>For devices that do not support manual sensor control
930      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
931      * this function may return 0.</p>
932      *
933      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
934      * regardless of whether the stream is input or output.</p>
935      *
936      * @param klass
937      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
938      *          non-empty array returned by {@link #getOutputSizes(Class)}
939      * @param size an output-compatible size
940      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
941      *          0 if the minimum frame duration is not available.
942      *
943      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
944      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
945      *
946      * @see CaptureRequest#SENSOR_FRAME_DURATION
947      * @see ImageFormat
948      * @see PixelFormat
949      */
getOutputMinFrameDuration(final Class<T> klass, final Size size)950     public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
951         if (!isOutputSupportedFor(klass)) {
952             throw new IllegalArgumentException("klass was not supported");
953         }
954 
955         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
956                 HAL_DATASPACE_UNKNOWN,
957                 size, DURATION_MIN_FRAME);
958     }
959 
960     /**
961      * Get the stall duration for the format/size combination (in nanoseconds).
962      *
963      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
964      * <p>{@code size} should be one of the ones returned by
965      * {@link #getOutputSizes(int)}.</p>
966      *
967      * <p>
968      * A stall duration is how much extra time would get added to the normal minimum frame duration
969      * for a repeating request that has streams with non-zero stall.
970      *
971      * <p>For example, consider JPEG captures which have the following characteristics:
972      *
973      * <ul>
974      * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
975      * in requests in which they are directly referenced, they act as JPEG streams.
976      * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
977      * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
978      * requests that actually reference a JPEG stream.
979      * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
980      * process more than 1 capture at a time.
981      * </ul>
982      *
983      * <p>In other words, using a repeating YUV request would result in a steady frame rate
984      * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
985      * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
986      * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
987      * 30 FPS.</p>
988      *
989      * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
990      * frame rate drop unless there are still outstanding buffers for that stream from previous
991      * requests.</p>
992      *
993      * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
994      * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
995      * added with the maximum stall duration for {@code S}.</p>
996      *
997      * <p>If interleaving requests with and without a stall duration, a request will stall by the
998      * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
999      *
1000      * <p>This means that a stalling request will not have an exposure start until the stall has
1001      * completed.</p>
1002      *
1003      * <p>This should correspond to the stall duration when only that stream is active, with all
1004      * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
1005      * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
1006      * indeterminate stall duration for all streams in a request (the regular stall calculation
1007      * rules are ignored).</p>
1008      *
1009      * <p>The following formats may always have a stall duration:
1010      * <ul>
1011      * <li>{@link ImageFormat#JPEG JPEG}
1012      * <li>{@link ImageFormat#RAW_SENSOR RAW16}
1013      * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
1014      * </ul>
1015      * </p>
1016      *
1017      * <p>The following formats will never have a stall duration:
1018      * <ul>
1019      * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
1020      * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
1021      * </ul></p>
1022      *
1023      * <p>
1024      * All other formats may or may not have an allowed stall duration on a per-capability basis;
1025      * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1026      * android.request.availableCapabilities} for more details.</p>
1027      * </p>
1028      *
1029      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
1030      * for more information about calculating the max frame rate (absent stalls).</p>
1031      *
1032      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
1033      * @param size an output-compatible size
1034      * @return a stall duration {@code >=} 0 in nanoseconds
1035      *
1036      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
1037      * @throws NullPointerException if {@code size} was {@code null}
1038      *
1039      * @see CaptureRequest#SENSOR_FRAME_DURATION
1040      * @see ImageFormat
1041      * @see PixelFormat
1042      */
getOutputStallDuration(int format, Size size)1043     public long getOutputStallDuration(int format, Size size) {
1044         checkArgumentFormatSupported(format, /*output*/true);
1045 
1046         return getInternalFormatDuration(imageFormatToInternal(format),
1047                 imageFormatToDataspace(format),
1048                 size,
1049                 DURATION_STALL);
1050     }
1051 
1052     /**
1053      * Get the stall duration for the class/size combination (in nanoseconds).
1054      *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
1057      *
1058      * <p>{@code klass} should be one of the ones with a non-empty array returned by
1059      * {@link #getOutputSizes(Class)}.</p>
1060      *
1061      * <p>{@code size} should be one of the ones returned by
1062      * {@link #getOutputSizes(Class)}.</p>
1063      *
1064      * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
1065      * <em>stall duration</em>.</p>
1066      *
1067      * @param klass
1068      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
1069      *          non-empty array returned by {@link #getOutputSizes(Class)}
1070      * @param size an output-compatible size
     * @return a stall duration {@code >=} 0 in nanoseconds
1072      *
1073      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
1074      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
1075      *
1076      * @see CaptureRequest#SENSOR_FRAME_DURATION
1077      * @see ImageFormat
1078      * @see PixelFormat
1079      */
getOutputStallDuration(final Class<T> klass, final Size size)1080     public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
1081         if (!isOutputSupportedFor(klass)) {
1082             throw new IllegalArgumentException("klass was not supported");
1083         }
1084 
1085         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1086                 HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
1087     }
1088 
1089     /**
1090      * Check if this {@link StreamConfigurationMap} is equal to another
1091      * {@link StreamConfigurationMap}.
1092      *
1093      * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
1094      *
1095      * @return {@code true} if the objects were equal, {@code false} otherwise
1096      */
1097     @Override
equals(final Object obj)1098     public boolean equals(final Object obj) {
1099         if (obj == null) {
1100             return false;
1101         }
1102         if (this == obj) {
1103             return true;
1104         }
1105         if (obj instanceof StreamConfigurationMap) {
1106             final StreamConfigurationMap other = (StreamConfigurationMap) obj;
1107             // XX: do we care about order?
1108             return Arrays.equals(mConfigurations, other.mConfigurations) &&
1109                     Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
1110                     Arrays.equals(mStallDurations, other.mStallDurations) &&
1111                     Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
1112                     Arrays.equals(mDepthMinFrameDurations, other.mDepthMinFrameDurations) &&
1113                     Arrays.equals(mDepthStallDurations, other.mDepthStallDurations) &&
1114                     Arrays.equals(mDynamicDepthConfigurations, other.mDynamicDepthConfigurations) &&
1115                     Arrays.equals(mDynamicDepthMinFrameDurations,
1116                             other.mDynamicDepthMinFrameDurations) &&
1117                     Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
1118                     Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) &&
1119                     Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) &&
1120                     Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) &&
1121                     Arrays.equals(mHighSpeedVideoConfigurations,
1122                             other.mHighSpeedVideoConfigurations);
1123         }
1124         return false;
1125     }
1126 
1127     /**
1128      * {@inheritDoc}
1129      */
1130     @Override
hashCode()1131     public int hashCode() {
1132         // XX: do we care about order?
1133         return HashCodeHelpers.hashCodeGeneric(
1134                 mConfigurations, mMinFrameDurations, mStallDurations,
1135                 mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
1136                 mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
1137                 mDynamicDepthStallDurations, mHeicConfigurations,
1138                 mHeicMinFrameDurations, mHeicStallDurations,
1139                 mHighSpeedVideoConfigurations);
1140     }
1141 
1142     // Check that the argument is supported by #getOutputFormats or #getInputFormats
checkArgumentFormatSupported(int format, boolean output)1143     private int checkArgumentFormatSupported(int format, boolean output) {
1144         checkArgumentFormat(format);
1145 
1146         int internalFormat = imageFormatToInternal(format);
1147         int internalDataspace = imageFormatToDataspace(format);
1148 
1149         if (output) {
1150             if (internalDataspace == HAL_DATASPACE_DEPTH) {
1151                 if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1152                     return format;
1153                 }
1154             } else if (internalDataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
1155                 if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1156                     return format;
1157                 }
1158             } else if (internalDataspace == HAL_DATASPACE_HEIF) {
1159                 if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) {
1160                     return format;
1161                 }
1162             } else {
1163                 if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
1164                     return format;
1165                 }
1166             }
1167         } else {
1168             if (mInputFormats.indexOfKey(internalFormat) >= 0) {
1169                 return format;
1170             }
1171         }
1172 
1173         throw new IllegalArgumentException(String.format(
1174                 "format %x is not supported by this stream configuration map", format));
1175     }
1176 
1177     /**
1178      * Ensures that the format is either user-defined or implementation defined.
1179      *
1180      * <p>If a format has a different internal representation than the public representation,
1181      * passing in the public representation here will fail.</p>
1182      *
1183      * <p>For example if trying to use {@link ImageFormat#JPEG}:
1184      * it has a different public representation than the internal representation
1185      * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
1186      *
1187      * <p>Any invalid/undefined formats will raise an exception.</p>
1188      *
1189      * @param format image format
1190      * @return the format
1191      *
1192      * @throws IllegalArgumentException if the format was invalid
1193      */
checkArgumentFormatInternal(int format)1194     static int checkArgumentFormatInternal(int format) {
1195         switch (format) {
1196             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1197             case HAL_PIXEL_FORMAT_BLOB:
1198             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1199             case HAL_PIXEL_FORMAT_Y16:
1200                 return format;
1201             case ImageFormat.JPEG:
1202             case ImageFormat.HEIC:
1203                 throw new IllegalArgumentException(
1204                         "An unknown internal format: " + format);
1205             default:
1206                 return checkArgumentFormat(format);
1207         }
1208     }
1209 
1210     /**
1211      * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1212      *
1213      * <p>If a format has a different public representation than the internal representation,
1214      * passing in the internal representation here will fail.</p>
1215      *
1216      * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1217      * it has a different internal representation than the public representation
1218      * {@link ImageFormat#JPEG}, this check will fail.</p>
1219      *
1220      * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1221      * </p>
1222      *
1223      * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1224      *
1225      * @param format image format
1226      * @return the format
1227      *
1228      * @throws IllegalArgumentException if the format was not user-defined
1229      */
checkArgumentFormat(int format)1230     static int checkArgumentFormat(int format) {
1231         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1232             throw new IllegalArgumentException(String.format(
1233                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1234         }
1235 
1236         return format;
1237     }
1238 
1239     /**
1240      * Convert an internal format compatible with {@code graphics.h} into public-visible
1241      * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1242      *
1243      * <p>In particular these formats are converted:
1244      * <ul>
1245      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1246      * </ul>
1247      * </p>
1248      *
1249      * <p>Passing in a format which has no public equivalent will fail;
1250      * as will passing in a public format which has a different internal format equivalent.
1251      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1252      *
1253      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1254      *
1255      * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1256      * HAL_DATASPACE_DEPTH.</p>
1257      *
1258      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1259      * @return the converted image formats
1260      *
1261      * @throws IllegalArgumentException
1262      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1263      *          {@link ImageFormat#JPEG}
1264      *
1265      * @see ImageFormat
1266      * @see PixelFormat
1267      * @see #checkArgumentFormat
1268      * @hide
1269      */
imageFormatToPublic(int format)1270     public static int imageFormatToPublic(int format) {
1271         switch (format) {
1272             case HAL_PIXEL_FORMAT_BLOB:
1273                 return ImageFormat.JPEG;
1274             case ImageFormat.JPEG:
1275                 throw new IllegalArgumentException(
1276                         "ImageFormat.JPEG is an unknown internal format");
1277             default:
1278                 return format;
1279         }
1280     }
1281 
1282     /**
1283      * Convert an internal format compatible with {@code graphics.h} into public-visible
1284      * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1285      *
1286      * <p>In particular these formats are converted:
1287      * <ul>
1288      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1289      * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1290      * </ul>
1291      * </p>
1292      *
1293      * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1294      * as will passing in a public format which has a different internal format equivalent.
1295      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1296      *
1297      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1298      *
1299      * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1300      * HAL_DATASPACE_DEPTH.</p>
1301      *
1302      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1303      * @return the converted image formats
1304      *
1305      * @throws IllegalArgumentException
1306      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1307      *          {@link ImageFormat#JPEG}
1308      *
1309      * @see ImageFormat
1310      * @see PixelFormat
1311      * @see #checkArgumentFormat
1312      * @hide
1313      */
depthFormatToPublic(int format)1314     public static int depthFormatToPublic(int format) {
1315         switch (format) {
1316             case HAL_PIXEL_FORMAT_BLOB:
1317                 return ImageFormat.DEPTH_POINT_CLOUD;
1318             case HAL_PIXEL_FORMAT_Y16:
1319                 return ImageFormat.DEPTH16;
1320             case HAL_PIXEL_FORMAT_RAW16:
1321                 return ImageFormat.RAW_DEPTH;
1322             case ImageFormat.JPEG:
1323                 throw new IllegalArgumentException(
1324                         "ImageFormat.JPEG is an unknown internal format");
1325             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1326                 throw new IllegalArgumentException(
1327                         "IMPLEMENTATION_DEFINED must not leak to public API");
1328             default:
1329                 throw new IllegalArgumentException(
1330                         "Unknown DATASPACE_DEPTH format " + format);
1331         }
1332     }
1333 
1334     /**
1335      * Convert image formats from internal to public formats (in-place).
1336      *
1337      * @param formats an array of image formats
1338      * @return {@code formats}
1339      *
1340      * @see #imageFormatToPublic
1341      */
imageFormatToPublic(int[] formats)1342     static int[] imageFormatToPublic(int[] formats) {
1343         if (formats == null) {
1344             return null;
1345         }
1346 
1347         for (int i = 0; i < formats.length; ++i) {
1348             formats[i] = imageFormatToPublic(formats[i]);
1349         }
1350 
1351         return formats;
1352     }
1353 
1354     /**
1355      * Convert a public format compatible with {@code ImageFormat} to an internal format
1356      * from {@code graphics.h}.
1357      *
1358      * <p>In particular these formats are converted:
1359      * <ul>
1360      * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1361      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1362      * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB
1363      * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB
1364      * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1365      * </ul>
1366      * </p>
1367      *
1368      * <p>Passing in an internal format which has a different public format equivalent will fail.
1369      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1370      *
1371      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1372      *
1373      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1374      *
1375      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1376      * @return the converted image formats
1377      *
1378      * @see ImageFormat
1379      * @see PixelFormat
1380      *
1381      * @throws IllegalArgumentException
1382      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1383      */
imageFormatToInternal(int format)1384     static int imageFormatToInternal(int format) {
1385         switch (format) {
1386             case ImageFormat.JPEG:
1387             case ImageFormat.DEPTH_POINT_CLOUD:
1388             case ImageFormat.DEPTH_JPEG:
1389             case ImageFormat.HEIC:
1390                 return HAL_PIXEL_FORMAT_BLOB;
1391             case ImageFormat.DEPTH16:
1392                 return HAL_PIXEL_FORMAT_Y16;
1393             case ImageFormat.RAW_DEPTH:
1394                 return HAL_PIXEL_FORMAT_RAW16;
1395             default:
1396                 return format;
1397         }
1398     }
1399 
1400     /**
1401      * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1402      * from {@code graphics.h}.
1403      *
1404      * <p>In particular these formats are converted:
1405      * <ul>
1406      * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
1407      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1408      * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1409      * <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH
1410      * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF
1411      * <li>others => HAL_DATASPACE_UNKNOWN
1412      * </ul>
1413      * </p>
1414      *
1415      * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1416      * as will passing in an internal format which has a different public format equivalent.
1417      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1418      *
1419      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1420      *
1421      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1422      *
1423      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1424      * @return the converted image formats
1425      *
1426      * @see ImageFormat
1427      * @see PixelFormat
1428      *
1429      * @throws IllegalArgumentException
1430      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1431      */
imageFormatToDataspace(int format)1432     static int imageFormatToDataspace(int format) {
1433         switch (format) {
1434             case ImageFormat.JPEG:
1435                 return HAL_DATASPACE_V0_JFIF;
1436             case ImageFormat.DEPTH_POINT_CLOUD:
1437             case ImageFormat.DEPTH16:
1438             case ImageFormat.RAW_DEPTH:
1439                 return HAL_DATASPACE_DEPTH;
1440             case ImageFormat.DEPTH_JPEG:
1441                 return HAL_DATASPACE_DYNAMIC_DEPTH;
1442             case ImageFormat.HEIC:
1443                 return HAL_DATASPACE_HEIF;
1444             default:
1445                 return HAL_DATASPACE_UNKNOWN;
1446         }
1447     }
1448 
1449     /**
1450      * Convert image formats from public to internal formats (in-place).
1451      *
1452      * @param formats an array of image formats
1453      * @return {@code formats}
1454      *
1455      * @see #imageFormatToInternal
1456      *
1457      * @hide
1458      */
imageFormatToInternal(int[] formats)1459     public static int[] imageFormatToInternal(int[] formats) {
1460         if (formats == null) {
1461             return null;
1462         }
1463 
1464         for (int i = 0; i < formats.length; ++i) {
1465             formats[i] = imageFormatToInternal(formats[i]);
1466         }
1467 
1468         return formats;
1469     }
1470 
getPublicFormatSizes(int format, boolean output, boolean highRes)1471     private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1472         try {
1473             checkArgumentFormatSupported(format, output);
1474         } catch (IllegalArgumentException e) {
1475             return null;
1476         }
1477 
1478         int internalFormat = imageFormatToInternal(format);
1479         int dataspace = imageFormatToDataspace(format);
1480 
1481         return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1482     }
1483 
getInternalFormatSizes(int format, int dataspace, boolean output, boolean highRes)1484     private Size[] getInternalFormatSizes(int format, int dataspace,
1485             boolean output, boolean highRes) {
1486         // All depth formats are non-high-res.
1487         if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
1488             return new Size[0];
1489         }
1490 
1491         SparseIntArray formatsMap =
1492                 !output ? mInputFormats :
1493                 dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
1494                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats :
1495                 dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats :
1496                 highRes ? mHighResOutputFormats :
1497                 mOutputFormats;
1498 
1499         int sizesCount = formatsMap.get(format);
1500         if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH ||
1501                             dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
1502                             dataspace == HAL_DATASPACE_HEIF)) && sizesCount == 0) ||
1503                 (output && (dataspace != HAL_DATASPACE_DEPTH &&
1504                             dataspace != HAL_DATASPACE_DYNAMIC_DEPTH &&
1505                             dataspace != HAL_DATASPACE_HEIF) &&
1506                  mAllOutputFormats.get(format) == 0)) {
1507             return null;
1508         }
1509 
1510         Size[] sizes = new Size[sizesCount];
1511         int sizeIndex = 0;
1512 
1513         StreamConfiguration[] configurations =
1514                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1515                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1516                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1517                 mConfigurations;
1518         StreamConfigurationDuration[] minFrameDurations =
1519                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1520                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations :
1521                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1522                 mMinFrameDurations;
1523 
1524         for (StreamConfiguration config : configurations) {
1525             int fmt = config.getFormat();
1526             if (fmt == format && config.isOutput() == output) {
1527                 if (output && mListHighResolution) {
1528                     // Filter slow high-res output formats; include for
1529                     // highRes, remove for !highRes
1530                     long duration = 0;
1531                     for (int i = 0; i < minFrameDurations.length; i++) {
1532                         StreamConfigurationDuration d = minFrameDurations[i];
1533                         if (d.getFormat() == fmt &&
1534                                 d.getWidth() == config.getSize().getWidth() &&
1535                                 d.getHeight() == config.getSize().getHeight()) {
1536                             duration = d.getDuration();
1537                             break;
1538                         }
1539                     }
1540                     if (dataspace != HAL_DATASPACE_DEPTH &&
1541                             highRes != (duration > DURATION_20FPS_NS)) {
1542                         continue;
1543                     }
1544                 }
1545                 sizes[sizeIndex++] = config.getSize();
1546             }
1547         }
1548 
1549         // Dynamic depth streams can have both fast and also high res modes.
1550         if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
1551                 dataspace == HAL_DATASPACE_HEIF)) {
1552 
1553             if (sizeIndex > sizesCount) {
1554                 throw new AssertionError(
1555                         "Too many dynamic depth sizes (expected " + sizesCount + ", actual " +
1556                         sizeIndex + ")");
1557             }
1558 
1559             if (sizeIndex <= 0) {
1560                 sizes = new Size[0];
1561             } else {
1562                 sizes = Arrays.copyOf(sizes, sizeIndex);
1563             }
1564         } else if (sizeIndex != sizesCount) {
1565             throw new AssertionError(
1566                     "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
1567         }
1568 
1569         return sizes;
1570     }
1571 
1572     /** Get the list of publically visible output formats; does not include IMPL_DEFINED */
getPublicFormats(boolean output)1573     private int[] getPublicFormats(boolean output) {
1574         int[] formats = new int[getPublicFormatCount(output)];
1575 
1576         int i = 0;
1577 
1578         SparseIntArray map = getFormatsMap(output);
1579         for (int j = 0; j < map.size(); j++) {
1580             int format = map.keyAt(j);
1581             formats[i++] = imageFormatToPublic(format);
1582         }
1583         if (output) {
1584             for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1585                 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1586             }
1587             if (mDynamicDepthOutputFormats.size() > 0) {
1588                 // Only one publicly dynamic depth format is available.
1589                 formats[i++] = ImageFormat.DEPTH_JPEG;
1590             }
1591             if (mHeicOutputFormats.size() > 0) {
1592                 formats[i++] = ImageFormat.HEIC;
1593             }
1594         }
1595         if (formats.length != i) {
1596             throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1597         }
1598 
1599         return formats;
1600     }
1601 
1602     /** Get the format -> size count map for either output or input formats */
getFormatsMap(boolean output)1603     private SparseIntArray getFormatsMap(boolean output) {
1604         return output ? mAllOutputFormats : mInputFormats;
1605     }
1606 
getInternalFormatDuration(int format, int dataspace, Size size, int duration)1607     private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1608         // assume format is already checked, since its internal
1609 
1610         if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1611             throw new IllegalArgumentException("size was not supported");
1612         }
1613 
1614         StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1615 
1616         for (StreamConfigurationDuration configurationDuration : durations) {
1617             if (configurationDuration.getFormat() == format &&
1618                     configurationDuration.getWidth() == size.getWidth() &&
1619                     configurationDuration.getHeight() == size.getHeight()) {
1620                 return configurationDuration.getDuration();
1621             }
1622         }
1623         // Default duration is '0' (unsupported/no extra stall)
1624         return 0;
1625     }
1626 
1627     /**
1628      * Get the durations array for the kind of duration
1629      *
1630      * @see #DURATION_MIN_FRAME
1631      * @see #DURATION_STALL
1632      * */
getDurations(int duration, int dataspace)1633     private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1634         switch (duration) {
1635             case DURATION_MIN_FRAME:
1636                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1637                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ?
1638                         mDynamicDepthMinFrameDurations :
1639                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1640                         mMinFrameDurations;
1641 
1642             case DURATION_STALL:
1643                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations :
1644                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations :
1645                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations :
1646                         mStallDurations;
1647             default:
1648                 throw new IllegalArgumentException("duration was invalid");
1649         }
1650     }
1651 
1652     /** Count the number of publicly-visible output formats */
getPublicFormatCount(boolean output)1653     private int getPublicFormatCount(boolean output) {
1654         SparseIntArray formatsMap = getFormatsMap(output);
1655         int size = formatsMap.size();
1656         if (output) {
1657             size += mDepthOutputFormats.size();
1658             size += mDynamicDepthOutputFormats.size();
1659             size += mHeicOutputFormats.size();
1660         }
1661 
1662         return size;
1663     }
1664 
arrayContains(T[] array, T element)1665     private static <T> boolean arrayContains(T[] array, T element) {
1666         if (array == null) {
1667             return false;
1668         }
1669 
1670         for (T el : array) {
1671             if (Objects.equals(el, element)) {
1672                 return true;
1673             }
1674         }
1675 
1676         return false;
1677     }
1678 
isSupportedInternalConfiguration(int format, int dataspace, Size size)1679     private boolean isSupportedInternalConfiguration(int format, int dataspace, Size size) {
1680         StreamConfiguration[] configurations =
1681                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1682                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1683                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1684                 mConfigurations;
1685 
1686         for (int i = 0; i < configurations.length; i++) {
1687             if (configurations[i].getFormat() == format &&
1688                     configurations[i].getSize().equals(size)) {
1689                 return true;
1690             }
1691         }
1692 
1693         return false;
1694     }
1695 
1696     /**
1697      * Return this {@link StreamConfigurationMap} as a string representation.
1698      *
1699      * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1700      * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1701      * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1702      * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1703      * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1704      *
1705      * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1706      * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1707      * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1708      * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
1709      * duration in nanoseconds.</p>
1710      *
1711      * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1712      * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1713      * format.</p>
1714      *
1715      * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1716      * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1717      * represents an input fomat and its valid output formats.</p>
1718      *
1719      * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1720      * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1721      * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1722      * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
1723      *
1724      * @return string representation of {@link StreamConfigurationMap}
1725      */
1726     @Override
toString()1727     public String toString() {
1728         StringBuilder sb = new StringBuilder("StreamConfiguration(");
1729         appendOutputsString(sb);
1730         sb.append(", ");
1731         appendHighResOutputsString(sb);
1732         sb.append(", ");
1733         appendInputsString(sb);
1734         sb.append(", ");
1735         appendValidOutputFormatsForInputString(sb);
1736         sb.append(", ");
1737         appendHighSpeedVideoConfigurationsString(sb);
1738         sb.append(")");
1739 
1740         return sb.toString();
1741     }
1742 
appendOutputsString(StringBuilder sb)1743     private void appendOutputsString(StringBuilder sb) {
1744         sb.append("Outputs(");
1745         int[] formats = getOutputFormats();
1746         for (int format : formats) {
1747             Size[] sizes = getOutputSizes(format);
1748             for (Size size : sizes) {
1749                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1750                 long stallDuration = getOutputStallDuration(format, size);
1751                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1752                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1753                         format, minFrameDuration, stallDuration));
1754             }
1755         }
1756         // Remove the pending ", "
1757         if (sb.charAt(sb.length() - 1) == ' ') {
1758             sb.delete(sb.length() - 2, sb.length());
1759         }
1760         sb.append(")");
1761     }
1762 
appendHighResOutputsString(StringBuilder sb)1763     private void appendHighResOutputsString(StringBuilder sb) {
1764         sb.append("HighResolutionOutputs(");
1765         int[] formats = getOutputFormats();
1766         for (int format : formats) {
1767             Size[] sizes = getHighResolutionOutputSizes(format);
1768             if (sizes == null) continue;
1769             for (Size size : sizes) {
1770                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1771                 long stallDuration = getOutputStallDuration(format, size);
1772                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1773                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1774                         format, minFrameDuration, stallDuration));
1775             }
1776         }
1777         // Remove the pending ", "
1778         if (sb.charAt(sb.length() - 1) == ' ') {
1779             sb.delete(sb.length() - 2, sb.length());
1780         }
1781         sb.append(")");
1782     }
1783 
appendInputsString(StringBuilder sb)1784     private void appendInputsString(StringBuilder sb) {
1785         sb.append("Inputs(");
1786         int[] formats = getInputFormats();
1787         for (int format : formats) {
1788             Size[] sizes = getInputSizes(format);
1789             for (Size size : sizes) {
1790                 sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1791                         size.getHeight(), formatToString(format), format));
1792             }
1793         }
1794         // Remove the pending ", "
1795         if (sb.charAt(sb.length() - 1) == ' ') {
1796             sb.delete(sb.length() - 2, sb.length());
1797         }
1798         sb.append(")");
1799     }
1800 
appendValidOutputFormatsForInputString(StringBuilder sb)1801     private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1802         sb.append("ValidOutputFormatsForInput(");
1803         int[] inputFormats = getInputFormats();
1804         for (int inputFormat : inputFormats) {
1805             sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1806             int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1807             for (int i = 0; i < outputFormats.length; i++) {
1808                 sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1809                         outputFormats[i]));
1810                 if (i < outputFormats.length - 1) {
1811                     sb.append(", ");
1812                 }
1813             }
1814             sb.append("], ");
1815         }
1816         // Remove the pending ", "
1817         if (sb.charAt(sb.length() - 1) == ' ') {
1818             sb.delete(sb.length() - 2, sb.length());
1819         }
1820         sb.append(")");
1821     }
1822 
appendHighSpeedVideoConfigurationsString(StringBuilder sb)1823     private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1824         sb.append("HighSpeedVideoConfigurations(");
1825         Size[] sizes = getHighSpeedVideoSizes();
1826         for (Size size : sizes) {
1827             Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1828             for (Range<Integer> range : ranges) {
1829                 sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1830                         size.getHeight(), range.getLower(), range.getUpper()));
1831             }
1832         }
1833         // Remove the pending ", "
1834         if (sb.charAt(sb.length() - 1) == ' ') {
1835             sb.delete(sb.length() - 2, sb.length());
1836         }
1837         sb.append(")");
1838     }
1839 
formatToString(int format)1840     private String formatToString(int format) {
1841         switch (format) {
1842             case ImageFormat.YV12:
1843                 return "YV12";
1844             case ImageFormat.YUV_420_888:
1845                 return "YUV_420_888";
1846             case ImageFormat.NV21:
1847                 return "NV21";
1848             case ImageFormat.NV16:
1849                 return "NV16";
1850             case PixelFormat.RGB_565:
1851                 return "RGB_565";
1852             case PixelFormat.RGBA_8888:
1853                 return "RGBA_8888";
1854             case PixelFormat.RGBX_8888:
1855                 return "RGBX_8888";
1856             case PixelFormat.RGB_888:
1857                 return "RGB_888";
1858             case ImageFormat.JPEG:
1859                 return "JPEG";
1860             case ImageFormat.YUY2:
1861                 return "YUY2";
1862             case ImageFormat.Y8:
1863                 return "Y8";
1864             case ImageFormat.Y16:
1865                 return "Y16";
1866             case ImageFormat.RAW_SENSOR:
1867                 return "RAW_SENSOR";
1868             case ImageFormat.RAW_PRIVATE:
1869                 return "RAW_PRIVATE";
1870             case ImageFormat.RAW10:
1871                 return "RAW10";
1872             case ImageFormat.DEPTH16:
1873                 return "DEPTH16";
1874             case ImageFormat.DEPTH_POINT_CLOUD:
1875                 return "DEPTH_POINT_CLOUD";
1876             case ImageFormat.DEPTH_JPEG:
1877                 return "DEPTH_JPEG";
1878             case ImageFormat.RAW_DEPTH:
1879                 return "RAW_DEPTH";
1880             case ImageFormat.PRIVATE:
1881                 return "PRIVATE";
1882             case ImageFormat.HEIC:
1883                 return "HEIC";
1884             default:
1885                 return "UNKNOWN";
1886         }
1887     }
1888 
    // HAL pixel format constants, mirrored from system/core/include/system/graphics.h.
    // NOTE(review): these must stay numerically in sync with the HAL header.
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // Bit positions of the standard/transfer/range sub-fields within a HAL
    // dataspace value; presumably mirrored from the HAL graphics headers as well.
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
    // JFIF dataspace, composed from the standard/transfer/range bit-fields above.
    private static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    // Dataspaces used to distinguish depth/dynamic-depth/HEIC stream configurations
    // from the regular image dataspaces.
    private static final int HAL_DATASPACE_DEPTH = 0x1000;
    private static final int HAL_DATASPACE_DYNAMIC_DEPTH = 0x1002;
    private static final int HAL_DATASPACE_HEIF = 0x1003;
    // 50 ms in nanoseconds, i.e. the frame duration of a 20 fps stream.
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * Index selectors for the duration tables; presumably used to pick which
     * table {@code getDurations(int, int)} returns — see that method.
     *
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Stream configurations and their per-format min-frame/stall durations for
    // the regular (non-depth) dataspaces.
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    // Same triple for HAL_DATASPACE_DEPTH configurations.
    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    // Same triple for HAL_DATASPACE_DYNAMIC_DEPTH configurations.
    private final StreamConfiguration[] mDynamicDepthConfigurations;
    private final StreamConfigurationDuration[] mDynamicDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDynamicDepthStallDurations;

    // Same triple for HAL_DATASPACE_HEIF configurations.
    private final StreamConfiguration[] mHeicConfigurations;
    private final StreamConfigurationDuration[] mHeicMinFrameDurations;
    private final StreamConfigurationDuration[] mHeicStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution output sizes are listed for this device.
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** internal format -> num dynamic depth output sizes mapping, for HAL_DATASPACE_DYNAMIC_DEPTH */
    private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray();
    /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */
    private final SparseIntArray mHeicOutputFormats = new SparseIntArray();

    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
1964 
1965 }
1966