1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.params;
18 
19 import android.graphics.ImageFormat;
20 import android.graphics.PixelFormat;
21 import android.hardware.camera2.CameraCharacteristics;
22 import android.hardware.camera2.CameraDevice;
23 import android.hardware.camera2.CameraMetadata;
24 import android.hardware.camera2.CaptureRequest;
25 import android.hardware.camera2.utils.HashCodeHelpers;
26 import android.hardware.camera2.utils.SurfaceUtils;
27 import android.hardware.camera2.legacy.LegacyCameraDevice;
28 import android.hardware.camera2.legacy.LegacyMetadataMapper;
29 import android.view.Surface;
30 import android.util.Range;
31 import android.util.Size;
32 import android.util.SparseIntArray;
33 
34 import java.util.Arrays;
35 import java.util.HashMap;
36 import java.util.Objects;
37 import java.util.Set;
38 
39 import static com.android.internal.util.Preconditions.*;
40 
41 /**
42  * Immutable class to store the available stream
43  * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
44  * {@link android.view.Surface Surfaces} for creating a
45  * {@link android.hardware.camera2.CameraCaptureSession capture session} with
46  * {@link android.hardware.camera2.CameraDevice#createCaptureSession}.
47  * <!-- TODO: link to input stream configuration -->
48  *
49  * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
50  * for that format) that are supported by a camera device.</p>
51  *
52  * <p>This also contains the minimum frame durations and stall durations for each format/size
53  * combination that can be used to calculate effective frame rate when submitting multiple captures.
54  * </p>
55  *
56  * <p>An instance of this object is available from {@link CameraCharacteristics} using
57  * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
58  * {@link CameraCharacteristics#get} method.</p>
59  *
60  * <pre><code>{@code
61  * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
62  * StreamConfigurationMap configs = characteristics.get(
63  *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
64  * }</code></pre>
65  *
66  * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
67  * @see CameraDevice#createCaptureSession
68  */
69 public final class StreamConfigurationMap {
70 
71     private static final String TAG = "StreamConfigurationMap";
72 
73     /**
74      * Create a new {@link StreamConfigurationMap}.
75      *
76      * <p>The array parameters ownership is passed to this object after creation; do not
77      * write to them after this constructor is invoked.</p>
78      *
79      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
80      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
81      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
82      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
83      *        camera device does not support high speed video recording
84      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
85      *        and thus needs a separate list of slow high-resolution output sizes
86      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
87      *         were {@code null} or any subelements were {@code null}
88      *
89      * @hide
90      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution)91     public StreamConfigurationMap(
92             StreamConfiguration[] configurations,
93             StreamConfigurationDuration[] minFrameDurations,
94             StreamConfigurationDuration[] stallDurations,
95             StreamConfiguration[] depthConfigurations,
96             StreamConfigurationDuration[] depthMinFrameDurations,
97             StreamConfigurationDuration[] depthStallDurations,
98             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
99             ReprocessFormatsMap inputOutputFormatsMap,
100             boolean listHighResolution) {
101 
102         if (configurations == null) {
103             // If no color configurations exist, ensure depth ones do
104             checkArrayElementsNotNull(depthConfigurations, "depthConfigurations");
105             mConfigurations = new StreamConfiguration[0];
106             mMinFrameDurations = new StreamConfigurationDuration[0];
107             mStallDurations = new StreamConfigurationDuration[0];
108         } else {
109             mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
110             mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
111             mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
112         }
113 
114         mListHighResolution = listHighResolution;
115 
116         if (depthConfigurations == null) {
117             mDepthConfigurations = new StreamConfiguration[0];
118             mDepthMinFrameDurations = new StreamConfigurationDuration[0];
119             mDepthStallDurations = new StreamConfigurationDuration[0];
120         } else {
121             mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
122                     "depthConfigurations");
123             mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
124                     "depthMinFrameDurations");
125             mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
126                     "depthStallDurations");
127         }
128 
129         if (highSpeedVideoConfigurations == null) {
130             mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
131         } else {
132             mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
133                     highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
134         }
135 
136         // For each format, track how many sizes there are available to configure
137         for (StreamConfiguration config : mConfigurations) {
138             int fmt = config.getFormat();
139             SparseIntArray map = null;
140             if (config.isOutput()) {
141                 mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
142                 long duration = 0;
143                 if (mListHighResolution) {
144                     for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
145                         if (configurationDuration.getFormat() == fmt &&
146                                 configurationDuration.getWidth() == config.getSize().getWidth() &&
147                                 configurationDuration.getHeight() == config.getSize().getHeight()) {
148                             duration = configurationDuration.getDuration();
149                             break;
150                         }
151                     }
152                 }
153                 map = duration <= DURATION_20FPS_NS ?
154                         mOutputFormats : mHighResOutputFormats;
155             } else {
156                 map = mInputFormats;
157             }
158             map.put(fmt, map.get(fmt) + 1);
159         }
160 
161         // For each depth format, track how many sizes there are available to configure
162         for (StreamConfiguration config : mDepthConfigurations) {
163             if (!config.isOutput()) {
164                 // Ignoring input depth configs
165                 continue;
166             }
167 
168             mDepthOutputFormats.put(config.getFormat(),
169                     mDepthOutputFormats.get(config.getFormat()) + 1);
170         }
171 
172         if (configurations != null &&
173                 mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
174             throw new AssertionError(
175                     "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
176         }
177 
178         // For each Size/FPS range, track how many FPS range/Size there are available
179         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
180             Size size = config.getSize();
181             Range<Integer> fpsRange = config.getFpsRange();
182             Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
183             if (fpsRangeCount == null) {
184                 fpsRangeCount = 0;
185             }
186             mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
187             Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
188             if (sizeCount == null) {
189                 sizeCount = 0;
190             }
191             mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
192         }
193 
194         mInputOutputFormatsMap = inputOutputFormatsMap;
195     }
196 
197     /**
198      * Get the image {@code format} output formats in this stream configuration.
199      *
200      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
201      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
202      *
203      * <p>Formats listed in this array are guaranteed to return true if queried with
204      * {@link #isOutputSupportedFor(int)}.</p>
205      *
206      * @return an array of integer format
207      *
208      * @see ImageFormat
209      * @see PixelFormat
210      */
getOutputFormats()211     public final int[] getOutputFormats() {
212         return getPublicFormats(/*output*/true);
213     }
214 
215     /**
216      * Get the image {@code format} output formats for a reprocessing input format.
217      *
218      * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
219      * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
220      * listed in the return value of this method. Including any other output Surface as a target
221      * will throw an IllegalArgumentException. If no output format is supported given the input
222      * format, an empty int[] will be returned.</p>
223      *
224      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
225      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
226      *
227      * <p>Formats listed in this array are guaranteed to return true if queried with
228      * {@link #isOutputSupportedFor(int)}.</p>
229      *
230      * @return an array of integer format
231      *
232      * @see ImageFormat
233      * @see PixelFormat
234      */
getValidOutputFormatsForInput(int inputFormat)235     public final int[] getValidOutputFormatsForInput(int inputFormat) {
236         if (mInputOutputFormatsMap == null) {
237             return new int[0];
238         }
239         return mInputOutputFormatsMap.getOutputs(inputFormat);
240     }
241 
242     /**
243      * Get the image {@code format} input formats in this stream configuration.
244      *
245      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
246      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
247      *
248      * @return an array of integer format
249      *
250      * @see ImageFormat
251      * @see PixelFormat
252      */
getInputFormats()253     public final int[] getInputFormats() {
254         return getPublicFormats(/*output*/false);
255     }
256 
257     /**
258      * Get the supported input sizes for this input format.
259      *
260      * <p>The format must have come from {@link #getInputFormats}; otherwise
261      * {@code null} is returned.</p>
262      *
263      * @param format a format from {@link #getInputFormats}
264      * @return a non-empty array of sizes, or {@code null} if the format was not available.
265      */
getInputSizes(final int format)266     public Size[] getInputSizes(final int format) {
267         return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
268     }
269 
270     /**
271      * Determine whether or not output surfaces with a particular user-defined format can be passed
272      * {@link CameraDevice#createCaptureSession createCaptureSession}.
273      *
274      * <p>This method determines that the output {@code format} is supported by the camera device;
275      * each output {@code surface} target may or may not itself support that {@code format}.
276      * Refer to the class which provides the surface for additional documentation.</p>
277      *
278      * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
279      * returned by {@link #getOutputSizes}.</p>
280      *
281      * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
282      * @return
283      *          {@code true} iff using a {@code surface} with this {@code format} will be
284      *          supported with {@link CameraDevice#createCaptureSession}
285      *
286      * @throws IllegalArgumentException
287      *          if the image format was not a defined named constant
288      *          from either {@link ImageFormat} or {@link PixelFormat}
289      *
290      * @see ImageFormat
291      * @see PixelFormat
292      * @see CameraDevice#createCaptureSession
293      */
isOutputSupportedFor(int format)294     public boolean isOutputSupportedFor(int format) {
295         checkArgumentFormat(format);
296 
297         int internalFormat = imageFormatToInternal(format);
298         int dataspace = imageFormatToDataspace(format);
299         if (dataspace == HAL_DATASPACE_DEPTH) {
300             return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
301         } else {
302             return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
303         }
304     }
305 
306     /**
307      * Determine whether or not output streams can be configured with a particular class
308      * as a consumer.
309      *
310      * <p>The following list is generally usable for outputs:
311      * <ul>
312      * <li>{@link android.media.ImageReader} -
313      * Recommended for image processing or streaming to external resources (such as a file or
314      * network)
315      * <li>{@link android.media.MediaRecorder} -
316      * Recommended for recording video (simple to use)
317      * <li>{@link android.media.MediaCodec} -
318      * Recommended for recording video (more complicated to use, with more flexibility)
319      * <li>{@link android.renderscript.Allocation} -
320      * Recommended for image processing with {@link android.renderscript RenderScript}
321      * <li>{@link android.view.SurfaceHolder} -
322      * Recommended for low-power camera preview with {@link android.view.SurfaceView}
323      * <li>{@link android.graphics.SurfaceTexture} -
324      * Recommended for OpenGL-accelerated preview processing or compositing with
325      * {@link android.view.TextureView}
326      * </ul>
327      * </p>
328      *
329      * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
330      * provide a producer endpoint that is suitable to be used with
331      * {@link CameraDevice#createCaptureSession}.</p>
332      *
333      * <p>Since not all of the above classes support output of all format and size combinations,
334      * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
335      *
336      * @param klass a non-{@code null} {@link Class} object reference
337      * @return {@code true} if this class is supported as an output, {@code false} otherwise
338      *
339      * @throws NullPointerException if {@code klass} was {@code null}
340      *
341      * @see CameraDevice#createCaptureSession
342      * @see #isOutputSupportedFor(Surface)
343      */
isOutputSupportedFor(Class<T> klass)344     public static <T> boolean isOutputSupportedFor(Class<T> klass) {
345         checkNotNull(klass, "klass must not be null");
346 
347         if (klass == android.media.ImageReader.class) {
348             return true;
349         } else if (klass == android.media.MediaRecorder.class) {
350             return true;
351         } else if (klass == android.media.MediaCodec.class) {
352             return true;
353         } else if (klass == android.renderscript.Allocation.class) {
354             return true;
355         } else if (klass == android.view.SurfaceHolder.class) {
356             return true;
357         } else if (klass == android.graphics.SurfaceTexture.class) {
358             return true;
359         }
360 
361         return false;
362     }
363 
364     /**
365      * Determine whether or not the {@code surface} in its current state is suitable to be included
366      * in a {@link CameraDevice#createCaptureSession capture session} as an output.
367      *
368      * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
369      * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
370      * compatible with the {@link CameraDevice} in general
371      * (see {@link #isOutputSupportedFor(Class)}, but it is the caller's responsibility to put the
372      * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
373      *
374      * <p>Reasons for a {@code surface} being specifically incompatible might be:
375      * <ul>
376      * <li>Using a format that's not listed by {@link #getOutputFormats}
377      * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
378      * <li>The {@code surface} itself is not in a state where it can service a new producer.</p>
379      * </li>
380      * </ul>
381      *
382      * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
383      * not match a camera-supported size, as long as the format (or class) is supported and the
384      * camera device supports a size that is equal to or less than 1080p in that format. If such as
385      * Surface is used to create a capture session, it will have its size rounded to the nearest
386      * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
387      * and ImageReader.</p>
388      *
389      * <p>This is not an exhaustive list; see the particular class's documentation for further
390      * possible reasons of incompatibility.</p>
391      *
392      * @param surface a non-{@code null} {@link Surface} object reference
393      * @return {@code true} if this is supported, {@code false} otherwise
394      *
395      * @throws NullPointerException if {@code surface} was {@code null}
396      * @throws IllegalArgumentException if the Surface endpoint is no longer valid
397      *
398      * @see CameraDevice#createCaptureSession
399      * @see #isOutputSupportedFor(Class)
400      */
isOutputSupportedFor(Surface surface)401     public boolean isOutputSupportedFor(Surface surface) {
402         checkNotNull(surface, "surface must not be null");
403 
404         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
405         int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
406         int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
407 
408         // See if consumer is flexible.
409         boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
410 
411         // Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482
412         if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
413                         surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
414             surfaceFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
415         }
416 
417         StreamConfiguration[] configs =
418                 surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
419         for (StreamConfiguration config : configs) {
420             if (config.getFormat() == surfaceFormat && config.isOutput()) {
421                 // Matching format, either need exact size match, or a flexible consumer
422                 // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
423                 if (config.getSize().equals(surfaceSize)) {
424                     return true;
425                 } else if (isFlexible &&
426                         (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
427                     return true;
428                 }
429             }
430         }
431         return false;
432     }
433 
434     /**
435      * Get a list of sizes compatible with {@code klass} to use as an output.
436      *
437      * <p>Some of the supported classes may support additional formats beyond
438      * {@link ImageFormat#PRIVATE}; this function only returns
439      * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
440      * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
441      * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
442      * class.</p>
443      *
444      * <p>If a well-defined format such as {@code NV21} is required, use
445      * {@link #getOutputSizes(int)} instead.</p>
446      *
447      * <p>The {@code klass} should be a supported output, that querying
448      * {@code #isOutputSupportedFor(Class)} should return {@code true}.</p>
449      *
450      * @param klass
451      *          a non-{@code null} {@link Class} object reference
452      * @return
453      *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
454      *          or {@code null} iff the {@code klass} is not a supported output.
455      *
456      *
457      * @throws NullPointerException if {@code klass} was {@code null}
458      *
459      * @see #isOutputSupportedFor(Class)
460      */
getOutputSizes(Class<T> klass)461     public <T> Size[] getOutputSizes(Class<T> klass) {
462         if (isOutputSupportedFor(klass) == false) {
463             return null;
464         }
465 
466         return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
467                 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
468     }
469 
470     /**
471      * Get a list of sizes compatible with the requested image {@code format}.
472      *
473      * <p>The {@code format} should be a supported format (one of the formats returned by
474      * {@link #getOutputFormats}).</p>
475      *
476      * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
477      * that support the
478      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
479      * capability to get a list of high-resolution output sizes that cannot operate at the preferred
480      * 20fps rate. This means that for some supported formats, this method will return an empty
481      * list, if all the supported resolutions operate at below 20fps.  For devices that do not
482      * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
483      *
484      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
485      * @return
486      *          an array of supported sizes,
487      *          or {@code null} if the {@code format} is not a supported output
488      *
489      * @see ImageFormat
490      * @see PixelFormat
491      * @see #getOutputFormats
492      */
getOutputSizes(int format)493     public Size[] getOutputSizes(int format) {
494         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
495     }
496 
497     /**
498      * Get a list of supported high speed video recording sizes.
499      * <p>
500      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
501      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
502      * list the supported high speed video size configurations. All the sizes listed will be a
503      * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
504      * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
505      * </p>
506      * <p>
507      * To enable high speed video recording, application must create a constrained create high speed
508      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
509      * a CaptureRequest list created by
510      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
511      * to this session. The application must select the video size from this method and
512      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
513      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
514      * generate the high speed request list. For example, if the application intends to do high
515      * speed recording, it can select the maximum size reported by this method to create high speed
516      * capture session. Note that for the use case of multiple output streams, application must
517      * select one unique size from this method to use (e.g., preview and recording streams must have
518      * the same size). Otherwise, the high speed session creation will fail. Once the size is
519      * selected, application can get the supported FPS ranges by
520      * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
521      * request lists via
522      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
523      * </p>
524      *
525      * @return an array of supported high speed video recording sizes
526      * @see #getHighSpeedVideoFpsRangesFor(Size)
527      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
528      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
529      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
530      */
getHighSpeedVideoSizes()531     public Size[] getHighSpeedVideoSizes() {
532         Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
533         return keySet.toArray(new Size[keySet.size()]);
534     }
535 
536     /**
537      * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
538      * <p>
539      * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
540      * </p>
541      * <p>
542      * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
543      * must not be used to setup capture requests that are submitted to unconstrained capture
544      * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
545      * </p>
546      * <p>
547      * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
548      * </p>
549      *
550      * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
551      * @return an array of supported high speed video recording FPS ranges The upper bound of
552      *         returned ranges is guaranteed to be greater than or equal to 120.
553      * @throws IllegalArgumentException if input size does not exist in the return value of
554      *             getHighSpeedVideoSizes
555      * @see #getHighSpeedVideoSizes()
556      * @see #getHighSpeedVideoFpsRanges()
557      */
getHighSpeedVideoFpsRangesFor(Size size)558     public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
559         Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
560         if (fpsRangeCount == null || fpsRangeCount == 0) {
561             throw new IllegalArgumentException(String.format(
562                     "Size %s does not support high speed video recording", size));
563         }
564 
565         @SuppressWarnings("unchecked")
566         Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
567         int i = 0;
568         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
569             if (size.equals(config.getSize())) {
570                 fpsRanges[i++] = config.getFpsRange();
571             }
572         }
573         return fpsRanges;
574     }
575 
576     /**
577      * Get a list of supported high speed video recording FPS ranges.
578      * <p>
579      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
580      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
581      * list the supported high speed video FPS range configurations. Application can then use
582      * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
583      * </p>
584      * <p>
585      * To enable high speed video recording, application must create a constrained create high speed
586      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
587      * a CaptureRequest list created by
588      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
589      * to this session. The application must select the video size from this method and
590      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
591      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
592      * generate the high speed request list. For example, if the application intends to do high
593      * speed recording, it can select one FPS range reported by this method, query the video sizes
594      * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
595      * sizes to create a high speed capture session. Note that for the use case of multiple output
596      * streams, application must select one unique size from this method to use (e.g., preview and
597      * recording streams must have the same size). Otherwise, the high speed session creation will
598      * fail. Once the high speed capture session is created, the application can set the FPS range
599      * in the recording request lists via
600      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
601      * </p>
602      * <p>
603      * The FPS ranges reported by this method will have below characteristics:
604      * <li>The fpsMin and fpsMax will be a multiple 30fps.</li>
605      * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
606      * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
607      * <li>For each fixed FPS range, there will be one corresponding variable FPS range [30,
608      * fps_max]. These kinds of FPS ranges are suitable for preview-only use cases where the
609      * application doesn't want the camera device always produce higher frame rate than the display
610      * refresh rate.</li>
611      * </p>
612      *
     * @return an array of supported high speed video recording FPS ranges. The upper bound of
614      *         returned ranges is guaranteed to be larger or equal to 120.
615      * @see #getHighSpeedVideoSizesFor
616      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
617      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
618      * @see CameraDevice#createHighSpeedRequestList
619      */
620     @SuppressWarnings("unchecked")
getHighSpeedVideoFpsRanges()621     public Range<Integer>[] getHighSpeedVideoFpsRanges() {
622         Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
623         return keySet.toArray(new Range[keySet.size()]);
624     }
625 
626     /**
627      * Get the supported video sizes for an input high speed FPS range.
628      *
629      * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
630      *
631      * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
632      * @return An array of video sizes to create high speed capture sessions for high speed streaming
633      *         use cases.
634      *
635      * @throws IllegalArgumentException if input FPS range does not exist in the return value of
636      *         getHighSpeedVideoFpsRanges
637      * @see #getHighSpeedVideoFpsRanges()
638      */
getHighSpeedVideoSizesFor(Range<Integer> fpsRange)639     public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
640         Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
641         if (sizeCount == null || sizeCount == 0) {
642             throw new IllegalArgumentException(String.format(
643                     "FpsRange %s does not support high speed video recording", fpsRange));
644         }
645 
646         Size[] sizes = new Size[sizeCount];
647         int i = 0;
648         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
649             if (fpsRange.equals(config.getFpsRange())) {
650                 sizes[i++] = config.getSize();
651             }
652         }
653         return sizes;
654     }
655 
656     /**
657      * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
658      * rate.
659      *
660      * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
661      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
662      * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
663      * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
664      * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list are
665      * still guaranteed to operate at a rate of at least 10 fps, not including stall duration.</p>
666      *
667      * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
668      * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
669      * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
670      * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
671      * fps requirement.</p>
672      *
673      * @return an array of supported slower high-resolution sizes, or {@code null} if the
674      *         BURST_CAPTURE capability is not supported
675      */
getHighResolutionOutputSizes(int format)676     public Size[] getHighResolutionOutputSizes(int format) {
677         if (!mListHighResolution) return null;
678 
679         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
680     }
681 
682     /**
683      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
684      * for the format/size combination (in nanoseconds).
685      *
686      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
687      * <p>{@code size} should be one of the ones returned by
688      * {@link #getOutputSizes(int)}.</p>
689      *
690      * <p>This should correspond to the frame duration when only that stream is active, with all
691      * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
692      * </p>
693      *
694      * <p>When multiple streams are used in a request, the minimum frame duration will be
695      * {@code max(individual stream min durations)}.</p>
696      *
697      * <p>For devices that do not support manual sensor control
698      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
699      * this function may return 0.</p>
700      *
701      * <!--
702      * TODO: uncomment after adding input stream support
703      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
704      * regardless of whether the stream is input or output.</p>
705      * -->
706      *
707      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
708      * @param size an output-compatible size
709      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
710      *          0 if the minimum frame duration is not available.
711      *
712      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
713      * @throws NullPointerException if {@code size} was {@code null}
714      *
715      * @see CaptureRequest#SENSOR_FRAME_DURATION
716      * @see #getOutputStallDuration(int, Size)
717      * @see ImageFormat
718      * @see PixelFormat
719      */
getOutputMinFrameDuration(int format, Size size)720     public long getOutputMinFrameDuration(int format, Size size) {
721         checkNotNull(size, "size must not be null");
722         checkArgumentFormatSupported(format, /*output*/true);
723 
724         return getInternalFormatDuration(imageFormatToInternal(format),
725                 imageFormatToDataspace(format),
726                 size,
727                 DURATION_MIN_FRAME);
728     }
729 
730     /**
731      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
732      * for the class/size combination (in nanoseconds).
733      *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
735      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
736      *
737      * <p>{@code klass} should be one of the ones which is supported by
738      * {@link #isOutputSupportedFor(Class)}.</p>
739      *
740      * <p>{@code size} should be one of the ones returned by
741      * {@link #getOutputSizes(int)}.</p>
742      *
743      * <p>This should correspond to the frame duration when only that stream is active, with all
744      * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
745      * </p>
746      *
747      * <p>When multiple streams are used in a request, the minimum frame duration will be
748      * {@code max(individual stream min durations)}.</p>
749      *
750      * <p>For devices that do not support manual sensor control
751      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
752      * this function may return 0.</p>
753      *
754      * <!--
755      * TODO: uncomment after adding input stream support
756      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
757      * regardless of whether the stream is input or output.</p>
758      * -->
759      *
760      * @param klass
761      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
762      *          non-empty array returned by {@link #getOutputSizes(Class)}
763      * @param size an output-compatible size
764      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
765      *          0 if the minimum frame duration is not available.
766      *
767      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
768      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
769      *
770      * @see CaptureRequest#SENSOR_FRAME_DURATION
771      * @see ImageFormat
772      * @see PixelFormat
773      */
getOutputMinFrameDuration(final Class<T> klass, final Size size)774     public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
775         if (!isOutputSupportedFor(klass)) {
776             throw new IllegalArgumentException("klass was not supported");
777         }
778 
779         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
780                 HAL_DATASPACE_UNKNOWN,
781                 size, DURATION_MIN_FRAME);
782     }
783 
784     /**
785      * Get the stall duration for the format/size combination (in nanoseconds).
786      *
787      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
788      * <p>{@code size} should be one of the ones returned by
789      * {@link #getOutputSizes(int)}.</p>
790      *
791      * <p>
792      * A stall duration is how much extra time would get added to the normal minimum frame duration
793      * for a repeating request that has streams with non-zero stall.
794      *
795      * <p>For example, consider JPEG captures which have the following characteristics:
796      *
797      * <ul>
798      * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
799      * in requests in which they are directly referenced, they act as JPEG streams.
800      * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
801      * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
802      * requests that actually reference a JPEG stream.
803      * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
804      * process more than 1 capture at a time.
805      * </ul>
806      *
807      * <p>In other words, using a repeating YUV request would result in a steady frame rate
808      * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
809      * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
810      * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
811      * 30 FPS.</p>
812      *
813      * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
814      * frame rate drop unless there are still outstanding buffers for that stream from previous
815      * requests.</p>
816      *
817      * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
818      * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
819      * added with the maximum stall duration for {@code S}.</p>
820      *
821      * <p>If interleaving requests with and without a stall duration, a request will stall by the
822      * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
823      *
824      * <p>This means that a stalling request will not have an exposure start until the stall has
825      * completed.</p>
826      *
827      * <p>This should correspond to the stall duration when only that stream is active, with all
828      * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
829      * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
830      * indeterminate stall duration for all streams in a request (the regular stall calculation
831      * rules are ignored).</p>
832      *
833      * <p>The following formats may always have a stall duration:
834      * <ul>
835      * <li>{@link ImageFormat#JPEG JPEG}
836      * <li>{@link ImageFormat#RAW_SENSOR RAW16}
837      * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
838      * </ul>
839      * </p>
840      *
841      * <p>The following formats will never have a stall duration:
842      * <ul>
843      * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
844      * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
845      * </ul></p>
846      *
847      * <p>
848      * All other formats may or may not have an allowed stall duration on a per-capability basis;
849      * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
850      * android.request.availableCapabilities} for more details.</p>
851      * </p>
852      *
853      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
854      * for more information about calculating the max frame rate (absent stalls).</p>
855      *
856      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
857      * @param size an output-compatible size
858      * @return a stall duration {@code >=} 0 in nanoseconds
859      *
860      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
861      * @throws NullPointerException if {@code size} was {@code null}
862      *
863      * @see CaptureRequest#SENSOR_FRAME_DURATION
864      * @see ImageFormat
865      * @see PixelFormat
866      */
getOutputStallDuration(int format, Size size)867     public long getOutputStallDuration(int format, Size size) {
868         checkArgumentFormatSupported(format, /*output*/true);
869 
870         return getInternalFormatDuration(imageFormatToInternal(format),
871                 imageFormatToDataspace(format),
872                 size,
873                 DURATION_STALL);
874     }
875 
876     /**
877      * Get the stall duration for the class/size combination (in nanoseconds).
878      *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
881      *
882      * <p>{@code klass} should be one of the ones with a non-empty array returned by
883      * {@link #getOutputSizes(Class)}.</p>
884      *
885      * <p>{@code size} should be one of the ones returned by
886      * {@link #getOutputSizes(Class)}.</p>
887      *
888      * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
889      * <em>stall duration</em>.</p>
890      *
891      * @param klass
892      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
893      *          non-empty array returned by {@link #getOutputSizes(Class)}
894      * @param size an output-compatible size
     * @return a stall duration {@code >=} 0 in nanoseconds
896      *
897      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
898      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
899      *
900      * @see CaptureRequest#SENSOR_FRAME_DURATION
901      * @see ImageFormat
902      * @see PixelFormat
903      */
getOutputStallDuration(final Class<T> klass, final Size size)904     public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
905         if (!isOutputSupportedFor(klass)) {
906             throw new IllegalArgumentException("klass was not supported");
907         }
908 
909         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
910                 HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
911     }
912 
913     /**
914      * Check if this {@link StreamConfigurationMap} is equal to another
915      * {@link StreamConfigurationMap}.
916      *
     * <p>Two maps are equal if and only if each of the respective elements is equal.</p>
918      *
919      * @return {@code true} if the objects were equal, {@code false} otherwise
920      */
921     @Override
equals(final Object obj)922     public boolean equals(final Object obj) {
923         if (obj == null) {
924             return false;
925         }
926         if (this == obj) {
927             return true;
928         }
929         if (obj instanceof StreamConfigurationMap) {
930             final StreamConfigurationMap other = (StreamConfigurationMap) obj;
931             // XX: do we care about order?
932             return Arrays.equals(mConfigurations, other.mConfigurations) &&
933                     Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
934                     Arrays.equals(mStallDurations, other.mStallDurations) &&
935                     Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
936                     Arrays.equals(mHighSpeedVideoConfigurations,
937                             other.mHighSpeedVideoConfigurations);
938         }
939         return false;
940     }
941 
942     /**
943      * {@inheritDoc}
944      */
945     @Override
hashCode()946     public int hashCode() {
947         // XX: do we care about order?
948         return HashCodeHelpers.hashCodeGeneric(
949                 mConfigurations, mMinFrameDurations,
950                 mStallDurations,
951                 mDepthConfigurations, mHighSpeedVideoConfigurations);
952     }
953 
954     // Check that the argument is supported by #getOutputFormats or #getInputFormats
checkArgumentFormatSupported(int format, boolean output)955     private int checkArgumentFormatSupported(int format, boolean output) {
956         checkArgumentFormat(format);
957 
958         int internalFormat = imageFormatToInternal(format);
959         int internalDataspace = imageFormatToDataspace(format);
960 
961         if (output) {
962             if (internalDataspace == HAL_DATASPACE_DEPTH) {
963                 if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
964                     return format;
965                 }
966             } else {
967                 if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
968                     return format;
969                 }
970             }
971         } else {
972             if (mInputFormats.indexOfKey(internalFormat) >= 0) {
973                 return format;
974             }
975         }
976 
977         throw new IllegalArgumentException(String.format(
978                 "format %x is not supported by this stream configuration map", format));
979     }
980 
981     /**
982      * Ensures that the format is either user-defined or implementation defined.
983      *
984      * <p>If a format has a different internal representation than the public representation,
985      * passing in the public representation here will fail.</p>
986      *
987      * <p>For example if trying to use {@link ImageFormat#JPEG}:
988      * it has a different public representation than the internal representation
989      * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
990      *
991      * <p>Any invalid/undefined formats will raise an exception.</p>
992      *
993      * @param format image format
994      * @return the format
995      *
996      * @throws IllegalArgumentException if the format was invalid
997      */
checkArgumentFormatInternal(int format)998     static int checkArgumentFormatInternal(int format) {
999         switch (format) {
1000             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1001             case HAL_PIXEL_FORMAT_BLOB:
1002             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1003             case HAL_PIXEL_FORMAT_Y16:
1004                 return format;
1005             case ImageFormat.JPEG:
1006                 throw new IllegalArgumentException(
1007                         "ImageFormat.JPEG is an unknown internal format");
1008             default:
1009                 return checkArgumentFormat(format);
1010         }
1011     }
1012 
1013     /**
1014      * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1015      *
1016      * <p>If a format has a different public representation than the internal representation,
1017      * passing in the internal representation here will fail.</p>
1018      *
1019      * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1020      * it has a different internal representation than the public representation
1021      * {@link ImageFormat#JPEG}, this check will fail.</p>
1022      *
1023      * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1024      * </p>
1025      *
1026      * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1027      *
1028      * @param format image format
1029      * @return the format
1030      *
1031      * @throws IllegalArgumentException if the format was not user-defined
1032      */
checkArgumentFormat(int format)1033     static int checkArgumentFormat(int format) {
1034         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1035             throw new IllegalArgumentException(String.format(
1036                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1037         }
1038 
1039         return format;
1040     }
1041 
1042     /**
1043      * Convert an internal format compatible with {@code graphics.h} into public-visible
1044      * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1045      *
1046      * <p>In particular these formats are converted:
1047      * <ul>
1048      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1049      * </ul>
1050      * </p>
1051      *
1052      * <p>Passing in a format which has no public equivalent will fail;
1053      * as will passing in a public format which has a different internal format equivalent.
1054      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1055      *
1056      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1057      *
1058      * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1059      * HAL_DATASPACE_DEPTH.</p>
1060      *
1061      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1062      * @return the converted image formats
1063      *
1064      * @throws IllegalArgumentException
1065      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1066      *          {@link ImageFormat#JPEG}
1067      *
1068      * @see ImageFormat
1069      * @see PixelFormat
1070      * @see #checkArgumentFormat
1071      */
imageFormatToPublic(int format)1072     static int imageFormatToPublic(int format) {
1073         switch (format) {
1074             case HAL_PIXEL_FORMAT_BLOB:
1075                 return ImageFormat.JPEG;
1076             case ImageFormat.JPEG:
1077                 throw new IllegalArgumentException(
1078                         "ImageFormat.JPEG is an unknown internal format");
1079             default:
1080                 return format;
1081         }
1082     }
1083 
1084     /**
1085      * Convert an internal format compatible with {@code graphics.h} into public-visible
1086      * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1087      *
1088      * <p>In particular these formats are converted:
1089      * <ul>
1090      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1091      * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1092      * </ul>
1093      * </p>
1094      *
1095      * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1096      * as will passing in a public format which has a different internal format equivalent.
1097      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1098      *
1099      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1100      *
1101      * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1102      * HAL_DATASPACE_DEPTH.</p>
1103      *
1104      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1105      * @return the converted image formats
1106      *
1107      * @throws IllegalArgumentException
1108      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1109      *          {@link ImageFormat#JPEG}
1110      *
1111      * @see ImageFormat
1112      * @see PixelFormat
1113      * @see #checkArgumentFormat
1114      */
depthFormatToPublic(int format)1115     static int depthFormatToPublic(int format) {
1116         switch (format) {
1117             case HAL_PIXEL_FORMAT_BLOB:
1118                 return ImageFormat.DEPTH_POINT_CLOUD;
1119             case HAL_PIXEL_FORMAT_Y16:
1120                 return ImageFormat.DEPTH16;
1121             case ImageFormat.JPEG:
1122                 throw new IllegalArgumentException(
1123                         "ImageFormat.JPEG is an unknown internal format");
1124             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1125                 throw new IllegalArgumentException(
1126                         "IMPLEMENTATION_DEFINED must not leak to public API");
1127             default:
1128                 throw new IllegalArgumentException(
1129                         "Unknown DATASPACE_DEPTH format " + format);
1130         }
1131     }
1132 
1133     /**
1134      * Convert image formats from internal to public formats (in-place).
1135      *
1136      * @param formats an array of image formats
1137      * @return {@code formats}
1138      *
1139      * @see #imageFormatToPublic
1140      */
imageFormatToPublic(int[] formats)1141     static int[] imageFormatToPublic(int[] formats) {
1142         if (formats == null) {
1143             return null;
1144         }
1145 
1146         for (int i = 0; i < formats.length; ++i) {
1147             formats[i] = imageFormatToPublic(formats[i]);
1148         }
1149 
1150         return formats;
1151     }
1152 
1153     /**
1154      * Convert a public format compatible with {@code ImageFormat} to an internal format
1155      * from {@code graphics.h}.
1156      *
1157      * <p>In particular these formats are converted:
1158      * <ul>
1159      * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1160      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1161      * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1162      * </ul>
1163      * </p>
1164      *
1165      * <p>Passing in an internal format which has a different public format equivalent will fail.
1166      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1167      *
1168      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1169      *
1170      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1171      *
1172      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1173      * @return the converted image formats
1174      *
1175      * @see ImageFormat
1176      * @see PixelFormat
1177      *
1178      * @throws IllegalArgumentException
1179      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1180      */
imageFormatToInternal(int format)1181     static int imageFormatToInternal(int format) {
1182         switch (format) {
1183             case ImageFormat.JPEG:
1184             case ImageFormat.DEPTH_POINT_CLOUD:
1185                 return HAL_PIXEL_FORMAT_BLOB;
1186             case ImageFormat.DEPTH16:
1187                 return HAL_PIXEL_FORMAT_Y16;
1188             default:
1189                 return format;
1190         }
1191     }
1192 
1193     /**
1194      * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1195      * from {@code graphics.h}.
1196      *
1197      * <p>In particular these formats are converted:
1198      * <ul>
1199      * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
1200      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1201      * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1202      * <li>others => HAL_DATASPACE_UNKNOWN
1203      * </ul>
1204      * </p>
1205      *
1206      * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1207      * as will passing in an internal format which has a different public format equivalent.
1208      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1209      *
1210      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1211      *
1212      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1213      *
1214      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1215      * @return the converted image formats
1216      *
1217      * @see ImageFormat
1218      * @see PixelFormat
1219      *
1220      * @throws IllegalArgumentException
1221      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1222      */
imageFormatToDataspace(int format)1223     static int imageFormatToDataspace(int format) {
1224         switch (format) {
1225             case ImageFormat.JPEG:
1226                 return HAL_DATASPACE_V0_JFIF;
1227             case ImageFormat.DEPTH_POINT_CLOUD:
1228             case ImageFormat.DEPTH16:
1229                 return HAL_DATASPACE_DEPTH;
1230             default:
1231                 return HAL_DATASPACE_UNKNOWN;
1232         }
1233     }
1234 
1235     /**
1236      * Convert image formats from public to internal formats (in-place).
1237      *
1238      * @param formats an array of image formats
1239      * @return {@code formats}
1240      *
1241      * @see #imageFormatToInternal
1242      *
1243      * @hide
1244      */
imageFormatToInternal(int[] formats)1245     public static int[] imageFormatToInternal(int[] formats) {
1246         if (formats == null) {
1247             return null;
1248         }
1249 
1250         for (int i = 0; i < formats.length; ++i) {
1251             formats[i] = imageFormatToInternal(formats[i]);
1252         }
1253 
1254         return formats;
1255     }
1256 
getPublicFormatSizes(int format, boolean output, boolean highRes)1257     private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1258         try {
1259             checkArgumentFormatSupported(format, output);
1260         } catch (IllegalArgumentException e) {
1261             return null;
1262         }
1263 
1264         int internalFormat = imageFormatToInternal(format);
1265         int dataspace = imageFormatToDataspace(format);
1266 
1267         return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1268     }
1269 
    /**
     * Return every size advertised for an internal format/dataspace pair in the
     * requested direction, optionally restricted to slow high-resolution outputs.
     *
     * @param format internal (HAL) pixel format
     * @param dataspace HAL dataspace; {@code HAL_DATASPACE_DEPTH} selects the depth tables
     * @param output {@code true} for output configurations, {@code false} for input
     * @param highRes {@code true} to list only the slow (&lt;= 20fps) high-res output sizes
     * @return array of sizes, sized exactly to the precomputed per-format count
     * @throws IllegalArgumentException if the format has no configurations at all
     */
    private Size[] getInternalFormatSizes(int format, int dataspace,
            boolean output, boolean highRes) {
        // All depth formats are non-high-res.
        if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
            return new Size[0];
        }

        // Pick the format -> size-count table that matches the requested direction/dataspace.
        SparseIntArray formatsMap =
                !output ? mInputFormats :
                dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
                highRes ? mHighResOutputFormats :
                mOutputFormats;

        int sizesCount = formatsMap.get(format);
        // A zero count is acceptable for non-depth outputs as long as the format exists in
        // mAllOutputFormats (e.g. a format with only high-res sizes queried with !highRes).
        if ( ((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0) ||
                (output && dataspace != HAL_DATASPACE_DEPTH && mAllOutputFormats.get(format) == 0)) {
            // Only throw if this is really not supported at all
            throw new IllegalArgumentException("format not available");
        }

        Size[] sizes = new Size[sizesCount];
        int sizeIndex = 0;

        StreamConfiguration[] configurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
        StreamConfigurationDuration[] minFrameDurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations : mMinFrameDurations;

        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            if (fmt == format && config.isOutput() == output) {
                if (output && mListHighResolution) {
                    // Filter slow high-res output formats; include for
                    // highRes, remove for !highRes
                    long duration = 0;
                    for (int i = 0; i < minFrameDurations.length; i++) {
                        StreamConfigurationDuration d = minFrameDurations[i];
                        if (d.getFormat() == fmt &&
                                d.getWidth() == config.getSize().getWidth() &&
                                d.getHeight() == config.getSize().getHeight()) {
                            duration = d.getDuration();
                            break;
                        }
                    }
                    // Depth sizes are never split into high-res/regular; a missing duration
                    // entry (duration == 0) classifies the size as a regular (fast) output.
                    if (dataspace != HAL_DATASPACE_DEPTH &&
                            highRes != (duration > DURATION_20FPS_NS)) {
                        continue;
                    }
                }
                sizes[sizeIndex++] = config.getSize();
            }
        }

        // Sanity check: the configuration list must agree with the precomputed count.
        if (sizeIndex != sizesCount) {
            throw new AssertionError(
                    "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
        }

        return sizes;
    }
1330 
1331     /** Get the list of publically visible output formats; does not include IMPL_DEFINED */
getPublicFormats(boolean output)1332     private int[] getPublicFormats(boolean output) {
1333         int[] formats = new int[getPublicFormatCount(output)];
1334 
1335         int i = 0;
1336 
1337         SparseIntArray map = getFormatsMap(output);
1338         for (int j = 0; j < map.size(); j++) {
1339             int format = map.keyAt(j);
1340             formats[i++] = imageFormatToPublic(format);
1341         }
1342         if (output) {
1343             for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1344                 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1345             }
1346         }
1347         if (formats.length != i) {
1348             throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1349         }
1350 
1351         return formats;
1352     }
1353 
1354     /** Get the format -> size count map for either output or input formats */
getFormatsMap(boolean output)1355     private SparseIntArray getFormatsMap(boolean output) {
1356         return output ? mAllOutputFormats : mInputFormats;
1357     }
1358 
getInternalFormatDuration(int format, int dataspace, Size size, int duration)1359     private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1360         // assume format is already checked, since its internal
1361 
1362         if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1363             throw new IllegalArgumentException("size was not supported");
1364         }
1365 
1366         StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1367 
1368         for (StreamConfigurationDuration configurationDuration : durations) {
1369             if (configurationDuration.getFormat() == format &&
1370                     configurationDuration.getWidth() == size.getWidth() &&
1371                     configurationDuration.getHeight() == size.getHeight()) {
1372                 return configurationDuration.getDuration();
1373             }
1374         }
1375         // Default duration is '0' (unsupported/no extra stall)
1376         return 0;
1377     }
1378 
1379     /**
1380      * Get the durations array for the kind of duration
1381      *
1382      * @see #DURATION_MIN_FRAME
1383      * @see #DURATION_STALL
1384      * */
getDurations(int duration, int dataspace)1385     private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1386         switch (duration) {
1387             case DURATION_MIN_FRAME:
1388                 return (dataspace == HAL_DATASPACE_DEPTH) ?
1389                         mDepthMinFrameDurations : mMinFrameDurations;
1390             case DURATION_STALL:
1391                 return (dataspace == HAL_DATASPACE_DEPTH) ?
1392                         mDepthStallDurations : mStallDurations;
1393             default:
1394                 throw new IllegalArgumentException("duration was invalid");
1395         }
1396     }
1397 
1398     /** Count the number of publicly-visible output formats */
getPublicFormatCount(boolean output)1399     private int getPublicFormatCount(boolean output) {
1400         SparseIntArray formatsMap = getFormatsMap(output);
1401         int size = formatsMap.size();
1402         if (output) {
1403             size += mDepthOutputFormats.size();
1404         }
1405 
1406         return size;
1407     }
1408 
arrayContains(T[] array, T element)1409     private static <T> boolean arrayContains(T[] array, T element) {
1410         if (array == null) {
1411             return false;
1412         }
1413 
1414         for (T el : array) {
1415             if (Objects.equals(el, element)) {
1416                 return true;
1417             }
1418         }
1419 
1420         return false;
1421     }
1422 
isSupportedInternalConfiguration(int format, int dataspace, Size size)1423     private boolean isSupportedInternalConfiguration(int format, int dataspace,
1424             Size size) {
1425         StreamConfiguration[] configurations =
1426                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
1427 
1428         for (int i = 0; i < configurations.length; i++) {
1429             if (configurations[i].getFormat() == format &&
1430                     configurations[i].getSize().equals(size)) {
1431                 return true;
1432             }
1433         }
1434 
1435         return false;
1436     }
1437 
1438     /**
1439      * Return this {@link StreamConfigurationMap} as a string representation.
1440      *
1441      * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1442      * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1443      * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1444      * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1445      * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1446      *
1447      * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1448      * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1449      * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1450      * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
1451      * duration in nanoseconds.</p>
1452      *
1453      * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1454      * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1455      * format.</p>
1456      *
1457      * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1458      * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1459      * represents an input fomat and its valid output formats.</p>
1460      *
1461      * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1462      * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1463      * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1464      * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
1465      *
1466      * @return string representation of {@link StreamConfigurationMap}
1467      */
1468     @Override
toString()1469     public String toString() {
1470         StringBuilder sb = new StringBuilder("StreamConfiguration(");
1471         appendOutputsString(sb);
1472         sb.append(", ");
1473         appendHighResOutputsString(sb);
1474         sb.append(", ");
1475         appendInputsString(sb);
1476         sb.append(", ");
1477         appendValidOutputFormatsForInputString(sb);
1478         sb.append(", ");
1479         appendHighSpeedVideoConfigurationsString(sb);
1480         sb.append(")");
1481 
1482         return sb.toString();
1483     }
1484 
appendOutputsString(StringBuilder sb)1485     private void appendOutputsString(StringBuilder sb) {
1486         sb.append("Outputs(");
1487         int[] formats = getOutputFormats();
1488         for (int format : formats) {
1489             Size[] sizes = getOutputSizes(format);
1490             for (Size size : sizes) {
1491                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1492                 long stallDuration = getOutputStallDuration(format, size);
1493                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1494                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1495                         format, minFrameDuration, stallDuration));
1496             }
1497         }
1498         // Remove the pending ", "
1499         if (sb.charAt(sb.length() - 1) == ' ') {
1500             sb.delete(sb.length() - 2, sb.length());
1501         }
1502         sb.append(")");
1503     }
1504 
appendHighResOutputsString(StringBuilder sb)1505     private void appendHighResOutputsString(StringBuilder sb) {
1506         sb.append("HighResolutionOutputs(");
1507         int[] formats = getOutputFormats();
1508         for (int format : formats) {
1509             Size[] sizes = getHighResolutionOutputSizes(format);
1510             if (sizes == null) continue;
1511             for (Size size : sizes) {
1512                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1513                 long stallDuration = getOutputStallDuration(format, size);
1514                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1515                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1516                         format, minFrameDuration, stallDuration));
1517             }
1518         }
1519         // Remove the pending ", "
1520         if (sb.charAt(sb.length() - 1) == ' ') {
1521             sb.delete(sb.length() - 2, sb.length());
1522         }
1523         sb.append(")");
1524     }
1525 
appendInputsString(StringBuilder sb)1526     private void appendInputsString(StringBuilder sb) {
1527         sb.append("Inputs(");
1528         int[] formats = getInputFormats();
1529         for (int format : formats) {
1530             Size[] sizes = getInputSizes(format);
1531             for (Size size : sizes) {
1532                 sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1533                         size.getHeight(), formatToString(format), format));
1534             }
1535         }
1536         // Remove the pending ", "
1537         if (sb.charAt(sb.length() - 1) == ' ') {
1538             sb.delete(sb.length() - 2, sb.length());
1539         }
1540         sb.append(")");
1541     }
1542 
appendValidOutputFormatsForInputString(StringBuilder sb)1543     private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1544         sb.append("ValidOutputFormatsForInput(");
1545         int[] inputFormats = getInputFormats();
1546         for (int inputFormat : inputFormats) {
1547             sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1548             int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1549             for (int i = 0; i < outputFormats.length; i++) {
1550                 sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1551                         outputFormats[i]));
1552                 if (i < outputFormats.length - 1) {
1553                     sb.append(", ");
1554                 }
1555             }
1556             sb.append("], ");
1557         }
1558         // Remove the pending ", "
1559         if (sb.charAt(sb.length() - 1) == ' ') {
1560             sb.delete(sb.length() - 2, sb.length());
1561         }
1562         sb.append(")");
1563     }
1564 
appendHighSpeedVideoConfigurationsString(StringBuilder sb)1565     private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1566         sb.append("HighSpeedVideoConfigurations(");
1567         Size[] sizes = getHighSpeedVideoSizes();
1568         for (Size size : sizes) {
1569             Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1570             for (Range<Integer> range : ranges) {
1571                 sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1572                         size.getHeight(), range.getLower(), range.getUpper()));
1573             }
1574         }
1575         // Remove the pending ", "
1576         if (sb.charAt(sb.length() - 1) == ' ') {
1577             sb.delete(sb.length() - 2, sb.length());
1578         }
1579         sb.append(")");
1580     }
1581 
formatToString(int format)1582     private String formatToString(int format) {
1583         switch (format) {
1584             case ImageFormat.YV12:
1585                 return "YV12";
1586             case ImageFormat.YUV_420_888:
1587                 return "YUV_420_888";
1588             case ImageFormat.NV21:
1589                 return "NV21";
1590             case ImageFormat.NV16:
1591                 return "NV16";
1592             case PixelFormat.RGB_565:
1593                 return "RGB_565";
1594             case PixelFormat.RGBA_8888:
1595                 return "RGBA_8888";
1596             case PixelFormat.RGBX_8888:
1597                 return "RGBX_8888";
1598             case PixelFormat.RGB_888:
1599                 return "RGB_888";
1600             case ImageFormat.JPEG:
1601                 return "JPEG";
1602             case ImageFormat.YUY2:
1603                 return "YUY2";
1604             case ImageFormat.Y8:
1605                 return "Y8";
1606             case ImageFormat.Y16:
1607                 return "Y16";
1608             case ImageFormat.RAW_SENSOR:
1609                 return "RAW_SENSOR";
1610             case ImageFormat.RAW_PRIVATE:
1611                 return "RAW_PRIVATE";
1612             case ImageFormat.RAW10:
1613                 return "RAW10";
1614             case ImageFormat.DEPTH16:
1615                 return "DEPTH16";
1616             case ImageFormat.DEPTH_POINT_CLOUD:
1617                 return "DEPTH_POINT_CLOUD";
1618             case ImageFormat.PRIVATE:
1619                 return "PRIVATE";
1620             default:
1621                 return "UNKNOWN";
1622         }
1623     }
1624 
    // from system/core/include/system/graphics.h
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // Bit positions of the standard/transfer/range fields within a HAL dataspace value.
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
    // V0 JFIF dataspace, composed from its standard/transfer/range components.
    private static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    private static final int HAL_DATASPACE_DEPTH = 0x1000;

    // Frame duration of a 20fps stream in nanoseconds; outputs slower than this are "high-res".
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Raw configuration tables for non-depth dataspaces, as reported by the HAL.
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    // Parallel tables for HAL_DATASPACE_DEPTH configurations.
    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution sizes are split out from the regular output lists.
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
1686 
1687 }
1688