1 /*
2  * Copyright (C) 2013-2018 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ANDROID_INCLUDE_CAMERA3_H
18 #define ANDROID_INCLUDE_CAMERA3_H
19 
20 #include <system/camera_metadata.h>
21 #include "camera_common.h"
22 
23 /**
 * Camera device HAL 3.5 [ CAMERA_DEVICE_API_VERSION_3_5 ]
25  *
26  * This is the current recommended version of the camera device HAL.
27  *
28  * Supports the android.hardware.Camera API, and as of v3.2, the
29  * android.hardware.camera2 API as LIMITED or above hardware level.
30  *
31  * Camera devices that support this version of the HAL must return
32  * CAMERA_DEVICE_API_VERSION_3_5 in camera_device_t.common.version and in
33  * camera_info_t.device_version (from camera_module_t.get_camera_info).
34  *
35  * CAMERA_DEVICE_API_VERSION_3_3 and above:
36  *    Camera modules that may contain version 3.3 or above devices must
37  *    implement at least version 2.2 of the camera module interface (as defined
38  *    by camera_module_t.common.module_api_version).
39  *
40  * CAMERA_DEVICE_API_VERSION_3_2:
41  *    Camera modules that may contain version 3.2 devices must implement at
42  *    least version 2.2 of the camera module interface (as defined by
43  *    camera_module_t.common.module_api_version).
44  *
45  * <= CAMERA_DEVICE_API_VERSION_3_1:
46  *    Camera modules that may contain version 3.1 (or 3.0) devices must
47  *    implement at least version 2.0 of the camera module interface
48  *    (as defined by camera_module_t.common.module_api_version).
49  *
50  * See camera_common.h for more versioning details.
51  *
52  * Documentation index:
53  *   S1. Version history
54  *   S2. Startup and operation sequencing
55  *   S3. Operational modes
56  *   S4. 3A modes and state machines
57  *   S5. Cropping
58  *   S6. Error management
59  *   S7. Key Performance Indicator (KPI) glossary
60  *   S8. Sample Use Cases
61  *   S9. Notes on Controls and Metadata
62  *   S10. Reprocessing flow and controls
63  */
64 
65 /**
66  * S1. Version history:
67  *
68  * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]:
69  *
70  *   - Converted from C++ CameraHardwareInterface abstraction layer.
71  *
72  *   - Supports android.hardware.Camera API.
73  *
74  * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:
75  *
76  *   - Sufficient for implementing existing android.hardware.Camera API.
77  *
78  *   - Allows for ZSL queue in camera service layer
79  *
 *   - Not tested for any new features such as manual capture control, Bayer RAW
81  *     capture, reprocessing of RAW data.
82  *
83  * 3.0: First revision of expanded-capability HAL:
84  *
85  *   - Major version change since the ABI is completely different. No change to
86  *     the required hardware capabilities or operational model from 2.0.
87  *
88  *   - Reworked input request and stream queue interfaces: Framework calls into
89  *     HAL with next request and stream buffers already dequeued. Sync framework
90  *     support is included, necessary for efficient implementations.
91  *
92  *   - Moved triggers into requests, most notifications into results.
93  *
94  *   - Consolidated all callbacks into framework into one structure, and all
95  *     setup methods into a single initialize() call.
96  *
97  *   - Made stream configuration into a single call to simplify stream
98  *     management. Bidirectional streams replace STREAM_FROM_STREAM construct.
99  *
100  *   - Limited mode semantics for older/limited hardware devices.
101  *
102  * 3.1: Minor revision of expanded-capability HAL:
103  *
104  *   - configure_streams passes consumer usage flags to the HAL.
105  *
106  *   - flush call to drop all in-flight requests/buffers as fast as possible.
107  *
108  * 3.2: Minor revision of expanded-capability HAL:
109  *
110  *   - Deprecates get_metadata_vendor_tag_ops.  Please use get_vendor_tag_ops
111  *     in camera_common.h instead.
112  *
113  *   - register_stream_buffers deprecated. All gralloc buffers provided
114  *     by framework to HAL in process_capture_request may be new at any time.
115  *
116  *   - add partial result support. process_capture_result may be called
117  *     multiple times with a subset of the available result before the full
118  *     result is available.
119  *
120  *   - add manual template to camera3_request_template. The applications may
121  *     use this template to control the capture settings directly.
122  *
123  *   - Rework the bidirectional and input stream specifications.
124  *
125  *   - change the input buffer return path. The buffer is returned in
126  *     process_capture_result instead of process_capture_request.
127  *
128  * 3.3: Minor revision of expanded-capability HAL:
129  *
130  *   - OPAQUE and YUV reprocessing API updates.
131  *
132  *   - Basic support for depth output buffers.
133  *
134  *   - Addition of data_space field to camera3_stream_t.
135  *
136  *   - Addition of rotation field to camera3_stream_t.
137  *
138  *   - Addition of camera3 stream configuration operation mode to camera3_stream_configuration_t
139  *
140  * 3.4: Minor additions to supported metadata and changes to data_space support
141  *
142  *   - Add ANDROID_SENSOR_OPAQUE_RAW_SIZE static metadata as mandatory if
143  *     RAW_OPAQUE format is supported.
144  *
145  *   - Add ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE static metadata as
146  *     mandatory if any RAW format is supported
147  *
148  *   - Switch camera3_stream_t data_space field to a more flexible definition,
149  *     using the version 0 definition of dataspace encoding.
150  *
151  *   - General metadata additions which are available to use for HALv3.2 or
152  *     newer:
153  *     - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3
154  *     - ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST
155  *     - ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE
156  *     - ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
157  *     - ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL
158  *     - ANDROID_SENSOR_OPAQUE_RAW_SIZE
159  *     - ANDROID_SENSOR_OPTICAL_BLACK_REGIONS
160  *
161  * 3.5: Minor revisions to support session parameters and logical multi camera:
162  *
163  *   - Add ANDROID_REQUEST_AVAILABLE_SESSION_KEYS static metadata, which is
 *     optional for implementations that want to support session parameters. If support is
 *     needed, then the HAL should populate the list with all available capture request keys
 *     that can cause severe processing delays when modified by the client. Typical examples
 *     include parameters that require time-consuming HW re-configuration or an internal
 *     camera pipeline update.
169  *
170  *   - Add a session parameter field to camera3_stream_configuration which can be populated
171  *     by clients with initial values for the keys found in ANDROID_REQUEST_AVAILABLE_SESSION_KEYS.
172  *
173  *   - Metadata additions for logical multi camera capability:
174  *     - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA
175  *     - ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS
176  *     - ANDROID_LOGICAL_MULTI_CAMERA_SYNC_TYPE
177  *
178  *   - Add physical camera id field in camera3_stream, so that for a logical
179  *     multi camera, the application has the option to specify which physical camera
180  *     a particular stream is configured on.
181  *
182  *   - Add physical camera id and settings field in camera3_capture_request, so that
183  *     for a logical multi camera, the application has the option to specify individual
184  *     settings for a particular physical device.
185  *
186  */
187 
188 /**
189  * S2. Startup and general expected operation sequence:
190  *
191  * 1. Framework calls camera_module_t->common.open(), which returns a
192  *    hardware_device_t structure.
193  *
194  * 2. Framework inspects the hardware_device_t->version field, and instantiates
195  *    the appropriate handler for that version of the camera hardware device. In
196  *    case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to
197  *    a camera3_device_t.
198  *
199  * 3. Framework calls camera3_device_t->ops->initialize() with the framework
 *    callback function pointers. This will be called only once after open(),
 *    before any other functions in the ops structure are called.
202  *
203  * 4. The framework calls camera3_device_t->ops->configure_streams() with a list
204  *    of input/output streams to the HAL device.
205  *
206  * 5. <= CAMERA_DEVICE_API_VERSION_3_1:
207  *
208  *    The framework allocates gralloc buffers and calls
209  *    camera3_device_t->ops->register_stream_buffers() for at least one of the
210  *    output streams listed in configure_streams. The same stream is registered
211  *    only once.
212  *
213  *    >= CAMERA_DEVICE_API_VERSION_3_2:
214  *
215  *    camera3_device_t->ops->register_stream_buffers() is not called and must
216  *    be NULL.
217  *
218  * 6. The framework requests default settings for some number of use cases with
219  *    calls to camera3_device_t->ops->construct_default_request_settings(). This
220  *    may occur any time after step 3.
221  *
222  * 7. The framework constructs and sends the first capture request to the HAL,
223  *    with settings based on one of the sets of default settings, and with at
224  *    least one output stream, which has been registered earlier by the
225  *    framework. This is sent to the HAL with
226  *    camera3_device_t->ops->process_capture_request(). The HAL must block the
227  *    return of this call until it is ready for the next request to be sent.
228  *
229  *    >= CAMERA_DEVICE_API_VERSION_3_2:
230  *
231  *    The buffer_handle_t provided in the camera3_stream_buffer_t array
232  *    in the camera3_capture_request_t may be new and never-before-seen
233  *    by the HAL on any given new request.
234  *
235  * 8. The framework continues to submit requests, and call
236  *    construct_default_request_settings to get default settings buffers for
237  *    other use cases.
238  *
239  *    <= CAMERA_DEVICE_API_VERSION_3_1:
240  *
241  *    The framework may call register_stream_buffers() at this time for
242  *    not-yet-registered streams.
243  *
244  * 9. When the capture of a request begins (sensor starts exposing for the
245  *    capture) or processing a reprocess request begins, the HAL
246  *    calls camera3_callback_ops_t->notify() with the SHUTTER event, including
247  *    the frame number and the timestamp for start of exposure. For a reprocess
248  *    request, the timestamp must be the start of exposure of the input image
249  *    which can be looked up with android.sensor.timestamp from
250  *    camera3_capture_request_t.settings when process_capture_request() is
251  *    called.
252  *
253  *    <= CAMERA_DEVICE_API_VERSION_3_1:
254  *
255  *    This notify call must be made before the first call to
256  *    process_capture_result() for that frame number.
257  *
258  *    >= CAMERA_DEVICE_API_VERSION_3_2:
259  *
260  *    The camera3_callback_ops_t->notify() call with the SHUTTER event should
261  *    be made as early as possible since the framework will be unable to
262  *    deliver gralloc buffers to the application layer (for that frame) until
263  *    it has a valid timestamp for the start of exposure (or the input image's
264  *    start of exposure for a reprocess request).
265  *
266  *    Both partial metadata results and the gralloc buffers may be sent to the
267  *    framework at any time before or after the SHUTTER event.
268  *
269  * 10. After some pipeline delay, the HAL begins to return completed captures to
270  *    the framework with camera3_callback_ops_t->process_capture_result(). These
271  *    are returned in the same order as the requests were submitted. Multiple
272  *    requests can be in flight at once, depending on the pipeline depth of the
273  *    camera HAL device.
274  *
275  *    >= CAMERA_DEVICE_API_VERSION_3_2:
276  *
277  *    Once a buffer is returned by process_capture_result as part of the
278  *    camera3_stream_buffer_t array, and the fence specified by release_fence
279  *    has been signaled (this is a no-op for -1 fences), the ownership of that
280  *    buffer is considered to be transferred back to the framework. After that,
281  *    the HAL must no longer retain that particular buffer, and the
282  *    framework may clean up the memory for it immediately.
283  *
284  *    process_capture_result may be called multiple times for a single frame,
285  *    each time with a new disjoint piece of metadata and/or set of gralloc
286  *    buffers. The framework will accumulate these partial metadata results
287  *    into one result.
288  *
289  *    In particular, it is legal for a process_capture_result to be called
290  *    simultaneously for both a frame N and a frame N+1 as long as the
291  *    above rule holds for gralloc buffers (both input and output).
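 *    (A sketch of this notify()/process_capture_result() sequence is given at
 *    the end of this section.)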
292  *
293  * 11. After some time, the framework may stop submitting new requests, wait for
294  *    the existing captures to complete (all buffers filled, all results
295  *    returned), and then call configure_streams() again. This resets the camera
296  *    hardware and pipeline for a new set of input/output streams. Some streams
297  *    may be reused from the previous configuration; if these streams' buffers
298  *    had already been registered with the HAL, they will not be registered
299  *    again. The framework then continues from step 7, if at least one
300  *    registered output stream remains (otherwise, step 5 is required first).
301  *
302  * 12. Alternatively, the framework may call camera3_device_t->common->close()
303  *    to end the camera session. This may be called at any time when no other
304  *    calls from the framework are active, although the call may block until all
305  *    in-flight captures have completed (all results returned, all buffers
306  *    filled). After the close call returns, no more calls to the
307  *    camera3_callback_ops_t functions are allowed from the HAL. Once the
308  *    close() call is underway, the framework may not call any other HAL device
309  *    functions.
310  *
311  * 13. In case of an error or other asynchronous event, the HAL must call
312  *    camera3_callback_ops_t->notify() with the appropriate error/event
313  *    message. After returning from a fatal device-wide error notification, the
314  *    HAL should act as if close() had been called on it. However, the HAL must
315  *    either cancel or complete all outstanding captures before calling
316  *    notify(), so that once notify() is called with a fatal error, the
317  *    framework will not receive further callbacks from the device. Methods
318  *    besides close() should return -ENODEV or NULL after the notify() method
319  *    returns from a fatal error message.
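 *
 * As an illustration of steps 9 and 10, the HAL side of reporting a single
 * frame might look roughly like the sketch below. This is not a normative
 * implementation; 'callbacks', 'frame_number', 'start_of_exposure_ns',
 * 'final_metadata', 'filled_buffers' and 'num_buffers' are assumed to be
 * tracked by the HAL, and all error handling is omitted:
 *
 *    // Step 9: notify the framework of start of exposure as early as possible
 *    camera3_notify_msg_t msg;
 *    memset(&msg, 0, sizeof(msg));
 *    msg.type = CAMERA3_MSG_SHUTTER;
 *    msg.message.shutter.frame_number = frame_number;
 *    msg.message.shutter.timestamp = start_of_exposure_ns;
 *    callbacks->notify(callbacks, &msg);
 *
 *    // Step 10: once the pipeline finishes, return the metadata and buffers
 *    camera3_capture_result_t result;
 *    memset(&result, 0, sizeof(result));
 *    result.frame_number = frame_number;
 *    result.result = final_metadata;           // final metadata packet
 *    result.num_output_buffers = num_buffers;
 *    result.output_buffers = filled_buffers;   // ownership returns to framework
 *    result.partial_result = 1;                // assuming partialResultCount == 1
 *    callbacks->process_capture_result(callbacks, &result);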
320  */
321 
322 /**
323  * S3. Operational modes:
324  *
325  * The camera 3 HAL device can implement one of two possible operational modes;
326  * limited and full. Full support is expected from new higher-end
327  * devices. Limited mode has hardware requirements roughly in line with those
328  * for a camera HAL device v1 implementation, and is expected from older or
329  * inexpensive devices. Full is a strict superset of limited, and they share the
330  * same essential operational flow, as documented above.
331  *
332  * The HAL must indicate its level of support with the
333  * android.info.supportedHardwareLevel static metadata entry, with 0 indicating
334  * limited mode, and 1 indicating full mode support.
335  *
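 * For illustration, a framework-side client could read this entry with the
 * camera_metadata.h helpers, as in the sketch below ('static_info' is assumed
 * to be the camera's static metadata buffer; error handling omitted):
 *
 *    camera_metadata_ro_entry_t entry;
 *    if (find_camera_metadata_ro_entry(static_info,
 *            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry) == 0 &&
 *        entry.count == 1 &&
 *        entry.data.u8[0] == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) {
 *        // The device advertises full mode support
 *    }
 *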
336  * Roughly speaking, limited-mode devices do not allow for application control
337  * of capture settings (3A control only), high-rate capture of high-resolution
338  * images, raw sensor readout, or support for YUV output streams above maximum
339  * recording resolution (JPEG only for large images).
340  *
341  * ** Details of limited mode behavior:
342  *
343  * - Limited-mode devices do not need to implement accurate synchronization
344  *   between capture request settings and the actual image data
345  *   captured. Instead, changes to settings may take effect some time in the
346  *   future, and possibly not for the same output frame for each settings
347  *   entry. Rapid changes in settings may result in some settings never being
348  *   used for a capture. However, captures that include high-resolution output
349  *   buffers ( > 1080p ) have to use the settings as specified (but see below
350  *   for processing rate).
351  *
352  * - Limited-mode devices do not need to support most of the
353  *   settings/result/static info metadata. Specifically, only the following settings
354  *   are expected to be consumed or produced by a limited-mode HAL device:
355  *
356  *   android.control.aeAntibandingMode (controls and dynamic)
357  *   android.control.aeExposureCompensation (controls and dynamic)
358  *   android.control.aeLock (controls and dynamic)
359  *   android.control.aeMode (controls and dynamic)
360  *   android.control.aeRegions (controls and dynamic)
361  *   android.control.aeTargetFpsRange (controls and dynamic)
362  *   android.control.aePrecaptureTrigger (controls and dynamic)
363  *   android.control.afMode (controls and dynamic)
364  *   android.control.afRegions (controls and dynamic)
365  *   android.control.awbLock (controls and dynamic)
366  *   android.control.awbMode (controls and dynamic)
367  *   android.control.awbRegions (controls and dynamic)
368  *   android.control.captureIntent (controls and dynamic)
369  *   android.control.effectMode (controls and dynamic)
370  *   android.control.mode (controls and dynamic)
371  *   android.control.sceneMode (controls and dynamic)
372  *   android.control.videoStabilizationMode (controls and dynamic)
373  *   android.control.aeAvailableAntibandingModes (static)
374  *   android.control.aeAvailableModes (static)
375  *   android.control.aeAvailableTargetFpsRanges (static)
376  *   android.control.aeCompensationRange (static)
377  *   android.control.aeCompensationStep (static)
378  *   android.control.afAvailableModes (static)
379  *   android.control.availableEffects (static)
380  *   android.control.availableSceneModes (static)
381  *   android.control.availableVideoStabilizationModes (static)
382  *   android.control.awbAvailableModes (static)
383  *   android.control.maxRegions (static)
384  *   android.control.sceneModeOverrides (static)
385  *   android.control.aeState (dynamic)
386  *   android.control.afState (dynamic)
387  *   android.control.awbState (dynamic)
388  *
389  *   android.flash.mode (controls and dynamic)
390  *   android.flash.info.available (static)
391  *
392  *   android.info.supportedHardwareLevel (static)
393  *
394  *   android.jpeg.gpsCoordinates (controls and dynamic)
395  *   android.jpeg.gpsProcessingMethod (controls and dynamic)
396  *   android.jpeg.gpsTimestamp (controls and dynamic)
397  *   android.jpeg.orientation (controls and dynamic)
398  *   android.jpeg.quality (controls and dynamic)
399  *   android.jpeg.thumbnailQuality (controls and dynamic)
400  *   android.jpeg.thumbnailSize (controls and dynamic)
401  *   android.jpeg.availableThumbnailSizes (static)
402  *   android.jpeg.maxSize (static)
403  *
404  *   android.lens.info.minimumFocusDistance (static)
405  *
406  *   android.request.id (controls and dynamic)
407  *
408  *   android.scaler.cropRegion (controls and dynamic)
409  *   android.scaler.availableStreamConfigurations (static)
410  *   android.scaler.availableMinFrameDurations (static)
411  *   android.scaler.availableStallDurations (static)
412  *   android.scaler.availableMaxDigitalZoom (static)
413  *   android.scaler.maxDigitalZoom (static)
414  *   android.scaler.croppingType (static)
415  *
416  *   android.sensor.orientation (static)
417  *   android.sensor.timestamp (dynamic)
418  *
419  *   android.statistics.faceDetectMode (controls and dynamic)
420  *   android.statistics.info.availableFaceDetectModes (static)
421  *   android.statistics.faceIds (dynamic)
422  *   android.statistics.faceLandmarks (dynamic)
423  *   android.statistics.faceRectangles (dynamic)
424  *   android.statistics.faceScores (dynamic)
425  *
426  *   android.sync.frameNumber (dynamic)
427  *   android.sync.maxLatency (static)
428  *
429  * - Captures in limited mode that include high-resolution (> 1080p) output
430  *   buffers may block in process_capture_request() until all the output buffers
431  *   have been filled. A full-mode HAL device must process sequences of
432  *   high-resolution requests at the rate indicated in the static metadata for
433  *   that pixel format. The HAL must still call process_capture_result() to
434  *   provide the output; the framework must simply be prepared for
435  *   process_capture_request() to block until after process_capture_result() for
436  *   that request completes for high-resolution captures for limited-mode
437  *   devices.
438  *
 * - Full-mode devices must support the following additional capabilities:
 *   - 30fps at maximum resolution is preferred; more than 20fps is required.
441  *   - Per frame control (android.sync.maxLatency == PER_FRAME_CONTROL).
442  *   - Sensor manual control metadata. See MANUAL_SENSOR defined in
443  *     android.request.availableCapabilities.
444  *   - Post-processing manual control metadata. See MANUAL_POST_PROCESSING defined
445  *     in android.request.availableCapabilities.
446  *
447  */
448 
449 /**
450  * S4. 3A modes and state machines:
451  *
452  * While the actual 3A algorithms are up to the HAL implementation, a high-level
453  * state machine description is defined by the HAL interface, to allow the HAL
454  * device and the framework to communicate about the current state of 3A, and to
455  * trigger 3A events.
456  *
457  * When the device is opened, all the individual 3A states must be
458  * STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked
459  * focus must be maintained across the configure() call.
460  *
461  * Triggering a 3A action involves simply setting the relevant trigger entry in
462  * the settings for the next request to indicate start of trigger. For example,
463  * the trigger for starting an autofocus scan is setting the entry
464  * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one
465  * request, and cancelling an autofocus scan is triggered by setting
 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_CANCEL. Otherwise,
467  * the entry will not exist, or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each
468  * request with a trigger entry set to a non-IDLE value will be treated as an
469  * independent triggering event.
470  *
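 * For example, the framework might arm a one-shot AF trigger in a request's
 * settings buffer using the camera_metadata.h helpers, as sketched below (the
 * 'settings' buffer is assumed to have spare capacity; error handling omitted):
 *
 *    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_START;
 *    camera_metadata_entry_t entry;
 *    if (find_camera_metadata_entry(settings,
 *            ANDROID_CONTROL_AF_TRIGGER, &entry) == 0) {
 *        update_camera_metadata_entry(settings, entry.index, &af_trigger, 1,
 *                NULL);
 *    } else {
 *        add_camera_metadata_entry(settings, ANDROID_CONTROL_AF_TRIGGER,
 *                &af_trigger, 1);
 *    }
 *    // Subsequent requests carry AF_TRIGGER_IDLE (or omit the entry) so that
 *    // only this one request is treated as a trigger event.
 *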
471  * At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting, which
472  * selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode
473  * (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting
 * (ANDROID_CONTROL_MODE_USE_SCENE_MODE).
475  *
476  * - In OFF mode, each of the individual AE/AF/AWB modes are effectively OFF,
477  *   and none of the capture controls may be overridden by the 3A routines.
478  *
479  * - In AUTO mode, Auto-focus, auto-exposure, and auto-whitebalance all run
480  *   their own independent algorithms, and have their own mode, state, and
481  *   trigger metadata entries, as listed in the next section.
482  *
483  * - In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must
484  *   be used to determine the behavior of 3A routines. In SCENE_MODEs other than
485  *   FACE_PRIORITY, the HAL must override the values of
 *   ANDROID_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected
487  *   SCENE_MODE. For example, the HAL may prefer SCENE_MODE_NIGHT to use
 *   CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE while a
 *   scene mode is active must be ignored for these scene modes.
490  *
491  * - For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in
492  *   ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering
493  *   and focusing on any detected faces in the scene.
494  *
495  * S4.1. Auto-focus settings and result entries:
496  *
497  *  Main metadata entries:
498  *
499  *   ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus
500  *      mode. Set by the framework in the request settings.
501  *
502  *     AF_MODE_OFF: AF is disabled; the framework/app directly controls lens
503  *         position.
504  *
505  *     AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is
506  *         triggered.
507  *
508  *     AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless
509  *         AF is triggered.
510  *
511  *     AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording
512  *         video. Triggering immediately locks focus in current
 *         position. Canceling resumes continuous focusing.
514  *
515  *     AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for
516  *        zero-shutter-lag still capture. Triggering locks focus once currently
517  *        active sweep concludes. Canceling resumes continuous focusing.
518  *
519  *     AF_MODE_EDOF: Advanced extended depth of field focusing. There is no
520  *        autofocus scan, so triggering one or canceling one has no effect.
521  *        Images are focused automatically by the HAL.
522  *
523  *   ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF
524  *       algorithm state, reported by the HAL in the result metadata.
525  *
526  *     AF_STATE_INACTIVE: No focusing has been done, or algorithm was
527  *        reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF.
528  *        When the device is opened, it must start in this state.
529  *
530  *     AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning
531  *        for good focus. The lens is moving.
532  *
533  *     AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is
534  *        well focused. The lens is not moving. The HAL may spontaneously leave
535  *        this state.
536  *
537  *     AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is
538  *        not well focused. The lens is not moving. The HAL may spontaneously
539  *        leave this state.
540  *
541  *     AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.
542  *
543  *     AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The
544  *        lens is not moving.
545  *
546  *     AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to
547  *        focus. The lens is not moving.
548  *
549  *   ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the
550  *       meaning of which is mode- and state- dependent. Set by the framework in
551  *       the request settings.
552  *
553  *     AF_TRIGGER_IDLE: No current trigger.
554  *
555  *     AF_TRIGGER_START: Trigger start of AF scan. Effect is mode and state
556  *         dependent.
557  *
558  *     AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to
559  *         default.
560  *
561  *  Additional metadata entries:
562  *
563  *   ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the FOV
564  *       that should be used to determine good focus. This applies to all AF
565  *       modes that scan for focus. Set by the framework in the request
566  *       settings.
567  *
568  * S4.2. Auto-exposure settings and result entries:
569  *
570  *  Main metadata entries:
571  *
572  *   ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure
573  *       mode. Set by the framework in the request settings.
574  *
575  *     AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain,
576  *         frame duration, and flash.
577  *
578  *     AE_MODE_ON: Standard autoexposure, with flash control disabled. User may
579  *         set flash to fire or to torch mode.
580  *
581  *     AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's
582  *         discretion for precapture and still capture. User control of flash
583  *         disabled.
584  *
585  *     AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired
 *         for capture, and at HAL's discretion for precapture. User control of
587  *         flash disabled.
588  *
589  *     AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at
590  *         HAL's discretion for precapture and still capture. Use a flash burst
591  *         at end of precapture sequence to reduce redeye in the final
592  *         picture. User control of flash disabled.
593  *
594  *   ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE
595  *       algorithm state, reported by the HAL in the result metadata.
596  *
597  *     AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is
598  *         opened, it must start in this state.
599  *
600  *     AE_STATE_SEARCHING: AE is not converged to a good value, and is adjusting
601  *         exposure parameters.
602  *
603  *     AE_STATE_CONVERGED: AE has found good exposure values for the current
604  *         scene, and the exposure parameters are not changing. HAL may
605  *         spontaneously leave this state to search for better solution.
606  *
607  *     AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure
608  *         values are not changing.
609  *
610  *     AE_STATE_FLASH_REQUIRED: The HAL has converged exposure, but believes
611  *         flash is required for a sufficiently bright picture. Used for
612  *         determining if a zero-shutter-lag frame can be used.
613  *
614  *     AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture
615  *         sequence. Depending on AE mode, this mode may involve firing the
616  *         flash for metering, or a burst of flash pulses for redeye reduction.
617  *
618  *   ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering
619  *       sequence before capturing a high-quality image. Set by the framework in
620  *       the request settings.
621  *
622  *      PRECAPTURE_TRIGGER_IDLE: No current trigger.
623  *
624  *      PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should
625  *         use the subsequent requests to measure good exposure/white balance
626  *         for an upcoming high-resolution capture.
627  *
628  *  Additional metadata entries:
629  *
630  *   ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current
631  *       values
632  *
633  *   ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE
634  *       algorithm target brightness point.
635  *
636  *   ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame
637  *       rate range for the AE algorithm. The AE routine cannot change the frame
638  *       rate to be outside these bounds.
639  *
640  *   ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV
641  *       that should be used to determine good exposure levels. This applies to
642  *       all AE modes besides OFF.
643  *
644  * S4.3. Auto-whitebalance settings and result entries:
645  *
646  *  Main metadata entries:
647  *
648  *   ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance
649  *       mode.
650  *
651  *     AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix.
652  *
653  *     AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color
654  *        transform, possibly using more complex transforms than a simple
655  *        matrix.
656  *
657  *     AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor
658  *        incandescent (tungsten) lighting, roughly 2700K.
659  *
660  *     AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent
661  *        lighting, roughly 5000K.
662  *
663  *     AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for
664  *        fluorescent lighting, roughly 3000K.
665  *
666  *     AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight,
667  *        roughly 5500K.
668  *
669  *     AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded
670  *        daylight, roughly 6500K.
671  *
672  *     AWB_MODE_TWILIGHT: Fixed white balance settings good for
673  *        near-sunset/sunrise, roughly 15000K.
674  *
675  *     AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly
676  *        lit by the sun, roughly 7500K.
677  *
678  *   ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB
679  *       algorithm state, reported by the HAL in the result metadata.
680  *
681  *     AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device
682  *         is opened, it must start in this state.
683  *
684  *     AWB_STATE_SEARCHING: AWB is not converged to a good value, and is
685  *         changing color adjustment parameters.
686  *
687  *     AWB_STATE_CONVERGED: AWB has found good color adjustment values for the
688  *         current scene, and the parameters are not changing. HAL may
689  *         spontaneously leave this state to search for better solution.
690  *
691  *     AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color
692  *         adjustment values are not changing.
693  *
694  *  Additional metadata entries:
695  *
696  *   ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to
697  *       their current values.
698  *
699  *   ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV
700  *       that should be used to determine good color balance. This applies only
701  *       to auto-WB mode.
702  *
703  * S4.4. General state machine transition notes
704  *
705  *   Switching between AF, AE, or AWB modes always resets the algorithm's state
706  *   to INACTIVE.  Similarly, switching between CONTROL_MODE or
707  *   CONTROL_SCENE_MODE if CONTROL_MODE == USE_SCENE_MODE resets all the
708  *   algorithm states to INACTIVE.
709  *
710  *   The tables below are per-mode.
711  *
712  * S4.5. AF state machines
713  *
714  *                       when enabling AF or changing AF mode
715  *| state              | trans. cause  | new state          | notes            |
716  *+--------------------+---------------+--------------------+------------------+
717  *| Any                | AF mode change| INACTIVE           |                  |
718  *+--------------------+---------------+--------------------+------------------+
719  *
720  *                            mode = AF_MODE_OFF or AF_MODE_EDOF
721  *| state              | trans. cause  | new state          | notes            |
722  *+--------------------+---------------+--------------------+------------------+
723  *| INACTIVE           |               | INACTIVE           | Never changes    |
724  *+--------------------+---------------+--------------------+------------------+
725  *
726  *                            mode = AF_MODE_AUTO or AF_MODE_MACRO
727  *| state              | trans. cause  | new state          | notes            |
728  *+--------------------+---------------+--------------------+------------------+
729  *| INACTIVE           | AF_TRIGGER    | ACTIVE_SCAN        | Start AF sweep   |
730  *|                    |               |                    | Lens now moving  |
731  *+--------------------+---------------+--------------------+------------------+
732  *| ACTIVE_SCAN        | AF sweep done | FOCUSED_LOCKED     | If AF successful |
733  *|                    |               |                    | Lens now locked  |
734  *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF sweep done | NOT_FOCUSED_LOCKED | If AF fails      |
736  *|                    |               |                    | Lens now locked  |
737  *+--------------------+---------------+--------------------+------------------+
738  *| ACTIVE_SCAN        | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
739  *|                    |               |                    | Lens now locked  |
740  *+--------------------+---------------+--------------------+------------------+
741  *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
742  *+--------------------+---------------+--------------------+------------------+
743  *| FOCUSED_LOCKED     | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
744  *|                    |               |                    | Lens now moving  |
745  *+--------------------+---------------+--------------------+------------------+
746  *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
747  *+--------------------+---------------+--------------------+------------------+
748  *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
749  *|                    |               |                    | Lens now moving  |
750  *+--------------------+---------------+--------------------+------------------+
751  *| All states         | mode change   | INACTIVE           |                  |
752  *+--------------------+---------------+--------------------+------------------+
753  *
754  *                            mode = AF_MODE_CONTINUOUS_VIDEO
755  *| state              | trans. cause  | new state          | notes            |
756  *+--------------------+---------------+--------------------+------------------+
757  *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
758  *|                    | new scan      |                    | Lens now moving  |
759  *+--------------------+---------------+--------------------+------------------+
760  *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
761  *|                    |               |                    | Lens now locked  |
762  *+--------------------+---------------+--------------------+------------------+
763  *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
764  *|                    | current scan  |                    | Lens now locked  |
765  *+--------------------+---------------+--------------------+------------------+
766  *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
767  *|                    | current scan  |                    | Lens now locked  |
768  *+--------------------+---------------+--------------------+------------------+
769  *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
770  *|                    |               |                    | if focus is good |
771  *|                    |               |                    | Lens now locked  |
772  *+--------------------+---------------+--------------------+------------------+
773  *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
774  *|                    |               |                    | if focus is bad  |
775  *|                    |               |                    | Lens now locked  |
776  *+--------------------+---------------+--------------------+------------------+
777  *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |
778  *|                    |               |                    | position         |
779  *|                    |               |                    | Lens now locked  |
780  *+--------------------+---------------+--------------------+------------------+
781  *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
782  *|                    | new scan      |                    | Lens now moving  |
783  *+--------------------+---------------+--------------------+------------------+
784  *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
785  *|                    | new scan      |                    | Lens now moving  |
786  *+--------------------+---------------+--------------------+------------------+
787  *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
788  *|                    |               |                    | Lens now locked  |
789  *+--------------------+---------------+--------------------+------------------+
790  *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
791  *|                    |               |                    | Lens now locked  |
792  *+--------------------+---------------+--------------------+------------------+
793  *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |
794  *+--------------------+---------------+--------------------+------------------+
795  *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |
796  *+--------------------+---------------+--------------------+------------------+
797  *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |
798  *+--------------------+---------------+--------------------+------------------+
799  *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |
800  *+--------------------+---------------+--------------------+------------------+
801  *
802  *                            mode = AF_MODE_CONTINUOUS_PICTURE
803  *| state              | trans. cause  | new state          | notes            |
804  *+--------------------+---------------+--------------------+------------------+
805  *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
806  *|                    | new scan      |                    | Lens now moving  |
807  *+--------------------+---------------+--------------------+------------------+
808  *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
809  *|                    |               |                    | Lens now locked  |
810  *+--------------------+---------------+--------------------+------------------+
811  *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
812  *|                    | current scan  |                    | Lens now locked  |
813  *+--------------------+---------------+--------------------+------------------+
814  *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
815  *|                    | current scan  |                    | Lens now locked  |
816  *+--------------------+---------------+--------------------+------------------+
817  *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Eventual trans.  |
818  *|                    |               |                    | once focus good  |
819  *|                    |               |                    | Lens now locked  |
820  *+--------------------+---------------+--------------------+------------------+
821  *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Eventual trans.  |
822  *|                    |               |                    | if cannot focus  |
823  *|                    |               |                    | Lens now locked  |
824  *+--------------------+---------------+--------------------+------------------+
825  *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |
826  *|                    |               |                    | position         |
827  *|                    |               |                    | Lens now locked  |
828  *+--------------------+---------------+--------------------+------------------+
829  *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
830  *|                    | new scan      |                    | Lens now moving  |
831  *+--------------------+---------------+--------------------+------------------+
832  *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
833  *|                    | new scan      |                    | Lens now moving  |
834  *+--------------------+---------------+--------------------+------------------+
835  *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
836  *|                    |               |                    | Lens now locked  |
837  *+--------------------+---------------+--------------------+------------------+
838  *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
839  *|                    |               |                    | Lens now locked  |
840  *+--------------------+---------------+--------------------+------------------+
841  *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |
842  *+--------------------+---------------+--------------------+------------------+
843  *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |
844  *+--------------------+---------------+--------------------+------------------+
845  *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |
846  *+--------------------+---------------+--------------------+------------------+
847  *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |
848  *+--------------------+---------------+--------------------+------------------+
849  *
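 * As an informal illustration (not a normative implementation), the
 * AF_MODE_AUTO / AF_MODE_MACRO table above could be realized by HAL-internal
 * helpers along the following lines; start_af_sweep() and cancel_af_sweep()
 * are hypothetical device-specific functions:
 *
 *    static uint8_t auto_af_on_trigger(uint8_t cur_state, uint8_t trigger) {
 *        switch (trigger) {
 *        case ANDROID_CONTROL_AF_TRIGGER_START:
 *            start_af_sweep();              // lens now moving
 *            return ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
 *        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
 *            cancel_af_sweep();             // cancel/reset AF
 *            return ANDROID_CONTROL_AF_STATE_INACTIVE;
 *        default:                           // AF_TRIGGER_IDLE: no change
 *            return cur_state;
 *        }
 *    }
 *
 *    // Called by the HAL when a triggered sweep finishes:
 *    static uint8_t auto_af_on_sweep_done(int focused) {
 *        return focused ? ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
 *                       : ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
 *    }
 *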
850  * S4.6. AE and AWB state machines
851  *
852  *   The AE and AWB state machines are mostly identical. AE has additional
853  *   FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two
854  *   states should be ignored for the AWB state machine.
855  *
856  *                  when enabling AE/AWB or changing AE/AWB mode
857  *| state              | trans. cause  | new state          | notes            |
858  *+--------------------+---------------+--------------------+------------------+
859  *| Any                |  mode change  | INACTIVE           |                  |
860  *+--------------------+---------------+--------------------+------------------+
861  *
862  *                            mode = AE_MODE_OFF / AWB mode not AUTO
863  *| state              | trans. cause  | new state          | notes            |
864  *+--------------------+---------------+--------------------+------------------+
865  *| INACTIVE           |               | INACTIVE           | AE/AWB disabled  |
866  *+--------------------+---------------+--------------------+------------------+
867  *
868  *                            mode = AE_MODE_ON_* / AWB_MODE_AUTO
869  *| state              | trans. cause  | new state          | notes            |
870  *+--------------------+---------------+--------------------+------------------+
871  *| INACTIVE           | HAL initiates | SEARCHING          |                  |
872  *|                    | AE/AWB scan   |                    |                  |
873  *+--------------------+---------------+--------------------+------------------+
874  *| INACTIVE           | AE/AWB_LOCK   | LOCKED             | values locked    |
875  *|                    | on            |                    |                  |
876  *+--------------------+---------------+--------------------+------------------+
877  *| SEARCHING          | HAL finishes  | CONVERGED          | good values, not |
878  *|                    | AE/AWB scan   |                    | changing         |
879  *+--------------------+---------------+--------------------+------------------+
880  *| SEARCHING          | HAL finishes  | FLASH_REQUIRED     | converged but too|
881  *|                    | AE scan       |                    | dark w/o flash   |
882  *+--------------------+---------------+--------------------+------------------+
883  *| SEARCHING          | AE/AWB_LOCK   | LOCKED             | values locked    |
884  *|                    | on            |                    |                  |
885  *+--------------------+---------------+--------------------+------------------+
 *| CONVERGED          | HAL initiates | SEARCHING          | values changing  |
887  *|                    | AE/AWB scan   |                    |                  |
888  *+--------------------+---------------+--------------------+------------------+
889  *| CONVERGED          | AE/AWB_LOCK   | LOCKED             | values locked    |
890  *|                    | on            |                    |                  |
891  *+--------------------+---------------+--------------------+------------------+
 *| FLASH_REQUIRED     | HAL initiates | SEARCHING          | values changing  |
893  *|                    | AE/AWB scan   |                    |                  |
894  *+--------------------+---------------+--------------------+------------------+
895  *| FLASH_REQUIRED     | AE/AWB_LOCK   | LOCKED             | values locked    |
896  *|                    | on            |                    |                  |
897  *+--------------------+---------------+--------------------+------------------+
898  *| LOCKED             | AE/AWB_LOCK   | SEARCHING          | values not good  |
899  *|                    | off           |                    | after unlock     |
900  *+--------------------+---------------+--------------------+------------------+
901  *| LOCKED             | AE/AWB_LOCK   | CONVERGED          | values good      |
902  *|                    | off           |                    | after unlock     |
903  *+--------------------+---------------+--------------------+------------------+
904  *| LOCKED             | AE_LOCK       | FLASH_REQUIRED     | exposure good,   |
905  *|                    | off           |                    | but too dark     |
906  *+--------------------+---------------+--------------------+------------------+
907  *| All AE states      | PRECAPTURE_   | PRECAPTURE         | Start precapture |
908  *|                    | START         |                    | sequence         |
909  *+--------------------+---------------+--------------------+------------------+
910  *| PRECAPTURE         | Sequence done.| CONVERGED          | Ready for high-  |
911  *|                    | AE_LOCK off   |                    | quality capture  |
912  *+--------------------+---------------+--------------------+------------------+
913  *| PRECAPTURE         | Sequence done.| LOCKED             | Ready for high-  |
914  *|                    | AE_LOCK on    |                    | quality capture  |
915  *+--------------------+---------------+--------------------+------------------+
916  *
917  */
918 
919 /**
920  * S5. Cropping:
921  *
922  * Cropping of the full pixel array (for digital zoom and other use cases where
923  * a smaller FOV is desirable) is communicated through the
924  * ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can
925  * change on a per-request basis, which is critical for implementing smooth
926  * digital zoom.
927  *
928  * The region is defined as a rectangle (x, y, width, height), with (x, y)
929  * describing the top-left corner of the rectangle. The rectangle is defined on
930  * the coordinate system of the sensor active pixel array, with (0,0) being the
931  * top-left pixel of the active pixel array. Therefore, the width and height
932  * cannot be larger than the dimensions reported in the
933  * ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed
934  * width and height are reported by the HAL through the
935  * ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the
936  * maximum supported zoom factor. Therefore, the minimum crop region width and
937  * height are:
938  *
939  * {width, height} =
940  *    { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /
941  *        ANDROID_SCALER_MAX_DIGITAL_ZOOM),
942  *      floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /
943  *        ANDROID_SCALER_MAX_DIGITAL_ZOOM) }
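 *
 * For example, a hypothetical 2000 x 1500 active pixel array with
 * ANDROID_SCALER_MAX_DIGITAL_ZOOM = 4.0 (values chosen only for illustration)
 * would give a minimum crop region of
 *
 *    { floor(2000 / 4.0), floor(1500 / 4.0) } = { 500, 375 }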
944  *
945  * If the crop region needs to fulfill specific requirements (for example, it
946  * needs to start on even coordinates, and its width/height needs to be even),
947  * the HAL must do the necessary rounding and write out the final crop region
948  * used in the output result metadata. Similarly, if the HAL implements video
949  * stabilization, it must adjust the result crop region to describe the region
950  * actually included in the output after video stabilization is applied. In
951  * general, a camera-using application must be able to determine the field of
952  * view it is receiving based on the crop region, the dimensions of the image
953  * sensor, and the lens focal length.
954  *
 * It is assumed that the cropping is applied after the raw-to-other-color-space
 * conversion. Raw streams (RAW16 and RAW_OPAQUE) don't have this conversion stage,
957  * and are not croppable. Therefore, the crop region must be ignored by the HAL
958  * for raw streams.
959  *
960  * Since the crop region applies to all non-raw streams, which may have different aspect
961  * ratios than the crop region, the exact sensor region used for each stream may
962  * be smaller than the crop region. Specifically, each stream should maintain
963  * square pixels and its aspect ratio by minimally further cropping the defined
964  * crop region. If the stream's aspect ratio is wider than the crop region, the
965  * stream should be further cropped vertically, and if the stream's aspect ratio
966  * is narrower than the crop region, the stream should be further cropped
967  * horizontally.
968  *
969  * In all cases, the stream crop must be centered within the full crop region,
 * and each stream is only either cropped horizontally or vertically relative to
971  * the full crop region, never both.
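 *
 * A minimal sketch of how such a centered, minimally-cropped per-stream region
 * could be computed; crop_rect_t and stream_crop() are hypothetical names, not
 * part of this API, and integer arithmetic truncates (this reproduces, e.g.,
 * the (500, 469, 1000, 562) crop in the first example below):
 *
 *    typedef struct { int32_t left, top, width, height; } crop_rect_t;
 *
 *    static crop_rect_t stream_crop(crop_rect_t crop,
 *                                   int32_t out_w, int32_t out_h) {
 *        crop_rect_t r = crop;
 *        if ((int64_t)out_w * crop.height > (int64_t)out_h * crop.width) {
 *            // Stream is wider than the crop region: crop vertically
 *            r.height = (int32_t)((int64_t)crop.width * out_h / out_w);
 *            r.top    = crop.top + (crop.height - r.height) / 2;
 *        } else {
 *            // Stream is narrower than the crop region: crop horizontally
 *            r.width = (int32_t)((int64_t)crop.height * out_w / out_h);
 *            r.left  = crop.left + (crop.width - r.width) / 2;
 *        }
 *        return r;
 *    }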
972  *
973  * For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a
974  * 1280x720 stream (16:9 aspect), below demonstrates the expected output regions
975  * for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x
976  * 1500 pixel array) sensor.
977  *
978  * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)
979  *
980  *   640x480 stream crop: (500, 375, 1000, 750) (equal to crop region)
981  *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)
982  *
983  * 0                   1000               2000
984  * +---------+---------+---------+----------+
985  * | Active pixel array                     |
986  * |                                        |
987  * |                                        |
988  * +         +-------------------+          + 375
989  * |         |                   |          |
990  * |         O===================O          |
991  * |         I 1280x720 stream   I          |
992  * +         I                   I          + 750
993  * |         I                   I          |
994  * |         O===================O          |
995  * |         |                   |          |
996  * +         +-------------------+          + 1125
997  * |          Crop region, 640x480 stream   |
998  * |                                        |
999  * |                                        |
1000  * +---------+---------+---------+----------+ 1500
1001  *
1002  * Crop region: (500, 375, 1333, 750) (16:9 aspect ratio)
1003  *
1004  *   640x480 stream crop: (666, 375, 1000, 750) (marked with =)
1005  *   1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region)
1006  *
1007  * 0                   1000               2000
1008  * +---------+---------+---------+----------+
1009  * | Active pixel array                     |
1010  * |                                        |
1011  * |                                        |
1012  * +         +---O==================O---+   + 375
1013  * |         |   I 640x480 stream   I   |   |
1014  * |         |   I                  I   |   |
1015  * |         |   I                  I   |   |
1016  * +         |   I                  I   |   + 750
1017  * |         |   I                  I   |   |
1018  * |         |   I                  I   |   |
1019  * |         |   I                  I   |   |
1020  * +         +---O==================O---+   + 1125
1021  * |          Crop region, 1280x720 stream  |
1022  * |                                        |
1023  * |                                        |
1024  * +---------+---------+---------+----------+ 1500
1025  *
1026  * Crop region: (500, 375, 750, 750) (1:1 aspect ratio)
1027  *
1028  *   640x480 stream crop: (500, 469, 750, 562) (marked with =)
 *   1280x720 stream crop: (500, 543, 750, 414) (marked with #)
1030  *
1031  * 0                   1000               2000
1032  * +---------+---------+---------+----------+
1033  * | Active pixel array                     |
1034  * |                                        |
1035  * |                                        |
1036  * +         +--------------+               + 375
1037  * |         O==============O               |
1038  * |         ################               |
1039  * |         #              #               |
1040  * +         #              #               + 750
1041  * |         #              #               |
1042  * |         ################ 1280x720      |
1043  * |         O==============O 640x480       |
1044  * +         +--------------+               + 1125
1045  * |          Crop region                   |
1046  * |                                        |
1047  * |                                        |
1048  * +---------+---------+---------+----------+ 1500
1049  *
1050  * And a final example, a 1024x1024 square aspect ratio stream instead of the
1051  * 480p stream:
1052  *
1053  * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)
1054  *
1055  *   1024x1024 stream crop: (625, 375, 750, 750) (marked with #)
1056  *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)
1057  *
1058  * 0                   1000               2000
1059  * +---------+---------+---------+----------+
1060  * | Active pixel array                     |
1061  * |                                        |
1062  * |              1024x1024 stream          |
1063  * +         +--###############--+          + 375
1064  * |         |  #             #  |          |
1065  * |         O===================O          |
1066  * |         I 1280x720 stream   I          |
1067  * +         I                   I          + 750
1068  * |         I                   I          |
1069  * |         O===================O          |
1070  * |         |  #             #  |          |
1071  * +         +--###############--+          + 1125
1072  * |          Crop region                   |
1073  * |                                        |
1074  * |                                        |
1075  * +---------+---------+---------+----------+ 1500
1076  *
1077  */
1078 
1079 /**
1080  * S6. Error management:
1081  *
1082  * Camera HAL device ops functions that have a return value will all return
1083  * -ENODEV / NULL in case of a serious error. This means the device cannot
1084  * continue operation, and must be closed by the framework. Once this error is
1085  * returned by some method, or if notify() is called with ERROR_DEVICE, only
1086  * the close() method can be called successfully. All other methods will return
1087  * -ENODEV / NULL.
1088  *
 * If a device op is called in the wrong sequence, for example if the framework
 * calls configure_streams() before initialize(), the device must return -ENOSYS
 * from the call, and do nothing.
1092  *
1093  * Transient errors in image capture must be reported through notify() as follows:
1094  *
1095  * - The failure of an entire capture to occur must be reported by the HAL by
1096  *   calling notify() with ERROR_REQUEST. Individual errors for the result
1097  *   metadata or the output buffers must not be reported in this case.
1098  *
1099  * - If the metadata for a capture cannot be produced, but some image buffers
1100  *   were filled, the HAL must call notify() with ERROR_RESULT.
1101  *
1102  * - If an output image buffer could not be filled, but either the metadata was
1103  *   produced or some other buffers were filled, the HAL must call notify() with
1104  *   ERROR_BUFFER for each failed buffer.
1105  *
1106  * In each of these transient failure cases, the HAL must still call
1107  * process_capture_result, with valid output and input (if an input buffer was
1108  * submitted) buffer_handle_t. If the result metadata could not be produced, it
1109  * should be NULL. If some buffers could not be filled, they must be returned with
 * process_capture_result in the error state, and their release fences must be set to
1111  * the acquire fences passed by the framework, or -1 if they have been waited on by
1112  * the HAL already.
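 *
 * As a rough, non-normative illustration of the ERROR_BUFFER path above (the
 * helper name is hypothetical; 'ops' is the camera3_callback_ops_t that the
 * framework passes to the HAL in initialize(), defined later in this header):
 *
 *   static void example_report_failed_buffer(const camera3_callback_ops_t *ops,
 *                                            uint32_t frame_number,
 *                                            camera3_stream_buffer_t *buf) {
 *       // 1. Tell the framework this buffer was dropped.
 *       camera3_notify_msg_t msg = { .type = CAMERA3_MSG_ERROR };
 *       msg.message.error.frame_number = frame_number;
 *       msg.message.error.error_stream = buf->stream;
 *       msg.message.error.error_code   = CAMERA3_MSG_ERROR_BUFFER;
 *       ops->notify(ops, &msg);
 *
 *       // 2. Still return the buffer through process_capture_result(), in the
 *       //    error state. If the HAL never waited on the acquire fence, the
 *       //    release fence must be the acquire fence handed in by the framework.
 *       buf->status        = CAMERA3_BUFFER_STATUS_ERROR;
 *       buf->release_fence = buf->acquire_fence;   // or -1 if already waited on
 *       buf->acquire_fence = -1;
 *   }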
1113  *
1114  * Invalid input arguments result in -EINVAL from the appropriate methods. In
1115  * that case, the framework must act as if that call had never been made.
1116  *
1117  */
1118 
1119 /**
1120  * S7. Key Performance Indicator (KPI) glossary:
1121  *
1122  * This includes some critical definitions that are used by KPI metrics.
1123  *
1124  * Pipeline Latency:
1125  *  For a given capture request, the duration from the framework calling
 *  process_capture_request to the HAL sending the capture result and all
 *  buffers back via the process_capture_result call. To make the Pipeline
 *  Latency measure independent of frame rate, it is measured by frame count.
1129  *
1130  *  For example, when frame rate is 30 (fps), the frame duration (time interval
1131  *  between adjacent frame capture time) is 33 (ms).
 *  If it takes 5 frames for the framework to get the result and buffers back for
1133  *  a given request, then the Pipeline Latency is 5 (frames), instead of
1134  *  5 x 33 = 165 (ms).
1135  *
1136  *  The Pipeline Latency is determined by android.request.pipelineDepth and
1137  *  android.request.pipelineMaxDepth, see their definitions for more details.
1138  *
1139  */
1140 
1141 /**
1142  * S8. Sample Use Cases:
1143  *
1144  * This includes some typical use case examples the camera HAL may support.
1145  *
1146  * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream.
1147  *
1148  *   For this use case, the bidirectional stream will be used by the framework as follows:
1149  *
1150  *   1. The framework includes a buffer from this stream as output buffer in a
1151  *      request as normal.
1152  *
1153  *   2. Once the HAL device returns a filled output buffer to the framework,
1154  *      the framework may do one of two things with the filled buffer:
1155  *
1156  *   2. a. The framework uses the filled data, and returns the now-used buffer
1157  *         to the stream queue for reuse. This behavior exactly matches the
1158  *         OUTPUT type of stream.
1159  *
1160  *   2. b. The framework wants to reprocess the filled data, and uses the
1161  *         buffer as an input buffer for a request. Once the HAL device has
1162  *         used the reprocessing buffer, it then returns it to the
1163  *         framework. The framework then returns the now-used buffer to the
1164  *         stream queue for reuse.
1165  *
1166  *   3. The HAL device will be given the buffer again as an output buffer for
1167  *        a request at some future point.
1168  *
 *   For the ZSL use case, the pixel format for the bidirectional stream will be
 *   HAL_PIXEL_FORMAT_RAW_OPAQUE or HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED if it
 *   is listed in android.scaler.availableInputOutputFormatsMap. When
 *   HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, the gralloc
 *   usage flags for the consumer endpoint will be set to GRALLOC_USAGE_HW_CAMERA_ZSL.
 *   A stream configuration list that has a BIDIRECTIONAL stream used as input will
 *   usually also have a distinct OUTPUT stream to receive the reprocessed data. For
 *   example, for the ZSL use case, the stream list might be configured with the
 *   following (see the sketch after this list):
1177  *
1178  *     - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream is used
1179  *       as input.
1180  *     - And a HAL_PIXEL_FORMAT_BLOB (JPEG) output stream.
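 *
 *   As a rough sketch (sizes are arbitrary examples), the two framework-defined
 *   streams for this configuration might look as follows before being passed to
 *   configure_streams(); usage and max_buffers are then filled in by the HAL:
 *
 *     camera3_stream_t zsl_stream = {
 *         .stream_type = CAMERA3_STREAM_BIDIRECTIONAL,
 *         .width       = 3264,
 *         .height      = 2448,
 *         .format      = HAL_PIXEL_FORMAT_RAW_OPAQUE,
 *     };
 *     camera3_stream_t jpeg_stream = {
 *         .stream_type = CAMERA3_STREAM_OUTPUT,
 *         .width       = 3264,
 *         .height      = 2448,
 *         .format      = HAL_PIXEL_FORMAT_BLOB,
 *     };
 *     camera3_stream_t *streams[] = { &zsl_stream, &jpeg_stream };
 *     camera3_stream_configuration_t config = {
 *         .num_streams = 2,
 *         .streams     = streams,
 *     };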
1181  *
1182  * S8.2 ZSL (OPAQUE) reprocessing with CAMERA3_STREAM_INPUT stream.
1183  *
1184  * CAMERA_DEVICE_API_VERSION_3_3:
1185  *   When OPAQUE_REPROCESSING capability is supported by the camera device, the INPUT stream
 *   can be used for application/framework-implemented use cases like Zero Shutter Lag (ZSL).
1187  *   This kind of stream will be used by the framework as follows:
1188  *
1189  *   1. Application/framework configures an opaque (RAW or YUV based) format output stream that is
1190  *      used to produce the ZSL output buffers. The stream pixel format will be
1191  *      HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.
1192  *
1193  *   2. Application/framework configures an opaque format input stream that is used to
1194  *      send the reprocessing ZSL buffers to the HAL. The stream pixel format will
1195  *      also be HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.
1196  *
1197  *   3. Application/framework configures a YUV/JPEG output stream that is used to receive the
1198  *      reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.
1199  *
 *   4. Application/framework picks a ZSL buffer from the ZSL output stream when a ZSL capture is
 *      issued by the application, and sends the data back to the HAL as the input
 *      buffer of a reprocessing request.
1203  *
 *   5. The HAL sends back the output YUV/JPEG result to the framework.
1205  *
1206  *   The HAL can select the actual opaque buffer format and configure the ISP pipeline
1207  *   appropriately based on the HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format and
1208  *   the gralloc usage flag GRALLOC_USAGE_HW_CAMERA_ZSL.
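 *
 *   As a rough, non-normative sketch of that selection inside a HAL's
 *   configure_streams() (the helper name and the vendor-specific routing it
 *   stands for are placeholders):
 *
 *     static void example_setup_zsl_stream(camera3_stream_t *stream) {
 *         if (stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
 *             (stream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) ==
 *                     GRALLOC_USAGE_HW_CAMERA_ZSL) {
 *             // Route this stream through the ISP's opaque ZSL path; the
 *             // chosen internal layout never needs to be visible to the app.
 *             example_select_opaque_raw_path(stream);   // vendor-specific
 *         }
 *     }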
 *
1210  * S8.3 YUV reprocessing with CAMERA3_STREAM_INPUT stream.
1211  *
1212  *   When YUV reprocessing is supported by the HAL, the INPUT stream
1213  *   can be used for the YUV reprocessing use cases like lucky-shot and image fusion.
1214  *   This kind of stream will be used by the framework as follows:
1215  *
 *   1. Application/framework configures a YCbCr_420 format output stream that is
1217  *      used to produce the output buffers.
1218  *
 *   2. Application/framework configures a YCbCr_420 format input stream that is used to
1220  *      send the reprocessing YUV buffers to the HAL.
1221  *
1222  *   3. Application/framework configures a YUV/JPEG output stream that is used to receive the
1223  *      reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.
1224  *
 *   4. Application/framework processes the output buffers (which could be as simple as
 *      picking an output buffer directly) from the output stream when a capture is issued,
 *      and sends the data back to the HAL as the input buffer of a reprocessing request.
1229  *
 *   5. The HAL sends back the output YUV/JPEG result to the framework.
1231  *
1232  */
1233 
1234 /**
1235  *   S9. Notes on Controls and Metadata
1236  *
1237  *   This section contains notes about the interpretation and usage of various metadata tags.
1238  *
1239  *   S9.1 HIGH_QUALITY and FAST modes.
1240  *
1241  *   Many camera post-processing blocks may be listed as having HIGH_QUALITY,
1242  *   FAST, and OFF operating modes. These blocks will typically also have an
1243  *   'available modes' tag representing which of these operating modes are
1244  *   available on a given device. The general policy regarding implementing
1245  *   these modes is as follows:
1246  *
1247  *   1. Operating mode controls of hardware blocks that cannot be disabled
1248  *      must not list OFF in their corresponding 'available modes' tags.
1249  *
1250  *   2. OFF will always be included in their corresponding 'available modes'
1251  *      tag if it is possible to disable that hardware block.
1252  *
1253  *   3. FAST must always be included in the 'available modes' tags for all
1254  *      post-processing blocks supported on the device.  If a post-processing
1255  *      block also has a slower and higher quality operating mode that does
1256  *      not meet the framerate requirements for FAST mode, HIGH_QUALITY should
1257  *      be included in the 'available modes' tag to represent this operating
1258  *      mode.
1259  */
1260 
1261 /**
1262  *   S10. Reprocessing flow and controls
1263  *
 *   This section describes the OPAQUE and YUV reprocessing flow and controls. OPAQUE reprocessing
 *   uses an opaque format that is not directly application-visible, and the application can
 *   only select some of the output buffers and send them back to the HAL for reprocessing, while
 *   YUV reprocessing gives the application the opportunity to process the buffers before
 *   reprocessing.
1268  *
 *   S8 gives the stream configurations for the typical reprocessing use cases;
 *   this section specifies the buffer flow and controls in more detail.
1271  *
1272  *   S10.1 OPAQUE (typically for ZSL use case) reprocessing flow and controls
1273  *
 *   For the OPAQUE reprocessing (e.g. ZSL) use case, after the application creates the specific
 *   output and input streams, the runtime buffer flow and controls are specified as below:
 *
 *   1. The application starts output streaming by sending repeating requests for output
 *      opaque buffers and preview. The buffers are held in an application-maintained
 *      circular buffer. The requests are based on the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG
 *      capture template, which should have all the necessary settings to guarantee that the
 *      output frame rate is not slowed down relative to the sensor output frame rate.
1282  *
 *   2. When a capture is issued, the application selects one output buffer based
 *      on its buffer selection logic, e.g. good AE and AF statistics etc. The
 *      application then creates a reprocess request based on the capture result associated
 *      with this selected buffer. The selected output buffer is now added to this reprocess
 *      request as an input buffer; the output buffer of this reprocess request should be
 *      either a JPEG output buffer or a YUV output buffer, or both, depending on the
 *      application's choice.
1290  *
 *   3. The application then alters the reprocess settings to get the best image quality. The
 *      HAL must support, and only support, the controls below if the HAL supports the
 *      OPAQUE_REPROCESSING capability:
 *          - android.jpeg.* (if a JPEG buffer is included as one of the outputs)
 *          - android.noiseReduction.mode (change to HIGH_QUALITY if it is supported)
 *          - android.edge.mode (change to HIGH_QUALITY if it is supported)
 *       All other controls must be ignored by the HAL. (A sketch of building these
 *       settings follows step 4.)
 *   4. The HAL processes the input buffer and returns the output buffers in the capture
 *      results as normal.
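 *
 *   A rough, non-normative sketch of the settings changes in steps 2-3, expressed
 *   directly against the camera_metadata C API from <system/camera_metadata.h>
 *   (error handling is omitted; 'zsl_result' is the capture result metadata
 *   associated with the selected buffer):
 *
 *     camera_metadata_t *reprocess = clone_camera_metadata(zsl_result);
 *     camera_metadata_entry_t e;
 *     uint8_t hq = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *     if (find_camera_metadata_entry(reprocess,
 *             ANDROID_NOISE_REDUCTION_MODE, &e) == 0) {
 *         update_camera_metadata_entry(reprocess, e.index, &hq, 1, NULL);
 *     }
 *     hq = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *     if (find_camera_metadata_entry(reprocess, ANDROID_EDGE_MODE, &e) == 0) {
 *         update_camera_metadata_entry(reprocess, e.index, &hq, 1, NULL);
 *     }
 *     // android.jpeg.* entries (orientation, quality, thumbnail size, ...)
 *     // may also be updated here if a JPEG output buffer is requested.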
1299  *
1300  *   S10.2 YUV reprocessing flow and controls
1301  *
 *   The YUV reprocessing buffer flow is similar to OPAQUE reprocessing, with the following differences:
1303  *
 *   1. The application may want finer-grained control of the intermediate YUV images
 *      (before reprocessing). For example, the application may choose
 *          - android.noiseReduction.mode == MINIMAL
 *      to make sure that no YUV-domain noise reduction has been applied to the output YUV
 *      buffers; it can then do its own advanced noise reduction on them. For the OPAQUE
 *      reprocessing case, this doesn't matter, as long as the final reprocessed image has
 *      the best quality.
 *   2. The application may modify the YUV output buffer data. For example, in the image fusion
 *      use case, multiple output images are merged together to improve the signal-to-noise
 *      ratio (SNR), so the input buffer may be generated from multiple buffers by the
 *      application. To avoid an excessive amount of noise reduction and an insufficient amount
 *      of edge enhancement being applied to the input buffer, the application can hint to the
 *      HAL how much effective exposure time improvement it has achieved; the HAL can then
 *      adjust the noise reduction and edge enhancement parameters to get the best reprocessed
 *      image quality. The tag below can be used for this purpose:
 *          - android.reprocess.effectiveExposureFactor
 *      The value is the exposure time increase factor applied to the original output image;
 *      for example, if N images are merged, the exposure time increase factor would be up
 *      to sqrt(N). See this tag's spec for more details, and the sketch below.
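 *
 *      A minimal sketch of setting this hint for an N-frame merge, assuming the
 *      same camera_metadata C API as in the S10.1 sketch and sqrtf() from
 *      <math.h> ('reprocess_settings' and 'num_merged_frames' are hypothetical
 *      names from the surrounding request-building code):
 *
 *        float factor = sqrtf((float)num_merged_frames);   // up to sqrt(N)
 *        camera_metadata_entry_t e;
 *        if (find_camera_metadata_entry(reprocess_settings,
 *                ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, &e) == 0) {
 *            update_camera_metadata_entry(reprocess_settings, e.index,
 *                                         &factor, 1, NULL);
 *        } else {
 *            add_camera_metadata_entry(reprocess_settings,
 *                ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, &factor, 1);
 *        }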
1322  *
1323  *   S10.3 Reprocessing pipeline characteristics
1324  *
 *   The reprocessing pipeline has the following characteristics that differ from the normal
 *   output pipeline:
1327  *
1328  *   1. The reprocessing result can be returned ahead of the pending normal output results. But
 *      the FIFO ordering must be maintained for all reprocessing results. For example, if the
 *      following requests (A stands for output requests, B stands for reprocessing requests)
 *      are being processed by the HAL:
 *          A1, A2, A3, A4, B1, A5, B2, A6...
 *      then the result of B1 can be returned before A1-A4, but the result of B2 must be
 *      returned after B1.
 *   2. Single input rule: For a given reprocessing request, all output buffers must be produced
 *      from the input buffer, rather than from sensor output. For example, if a reprocess
 *      request includes both JPEG and preview buffers, all output buffers must be produced from
 *      the input buffer included in the reprocessing request, rather than from the sensor. The
 *      HAL must not output preview buffers from the sensor while outputting the JPEG buffer
 *      from the input buffer.
 *   3. The input buffer will be from the camera output directly (ZSL case) or indirectly (image
 *      fusion case). For the case where the buffer is modified, the size will remain the same.
 *      The HAL can notify CAMERA3_MSG_ERROR_REQUEST if a buffer from an unknown source is sent.
1342  *   4. Result as reprocessing request: The HAL can expect that a reprocessing request is a copy
1343  *      of one of the output results with minor allowed setting changes. The HAL can notify
1344  *      CAMERA3_MSG_ERROR_REQUEST if a request from unknown source is issued.
 *   5. Output buffers may not be used as inputs across a configure_streams() boundary. This is
 *      because an opaque stream like the ZSL output stream may have a different actual image
 *      size inside the ZSL buffer, to save power and bandwidth for smaller-resolution JPEG
 *      capture. The HAL may notify CAMERA3_MSG_ERROR_REQUEST if this case occurs.
 *   6. Error reporting for reprocess requests during flush should follow the same rules
 *      specified by the flush() method.
1351  *
1352  */
1353 
1354 __BEGIN_DECLS
1355 
1356 struct camera3_device;
1357 
1358 /**********************************************************************
1359  *
1360  * Camera3 stream and stream buffer definitions.
1361  *
1362  * These structs and enums define the handles and contents of the input and
1363  * output streams connecting the HAL to various framework and application buffer
1364  * consumers. Each stream is backed by a gralloc buffer queue.
1365  *
1366  */
1367 
1368 /**
1369  * camera3_stream_type_t:
1370  *
1371  * The type of the camera stream, which defines whether the camera HAL device is
1372  * the producer or the consumer for that stream, and how the buffers of the
1373  * stream relate to the other streams.
1374  */
1375 typedef enum camera3_stream_type {
1376     /**
1377      * This stream is an output stream; the camera HAL device will be
1378      * responsible for filling buffers from this stream with newly captured or
1379      * reprocessed image data.
1380      */
1381     CAMERA3_STREAM_OUTPUT = 0,
1382 
1383     /**
1384      * This stream is an input stream; the camera HAL device will be responsible
1385      * for reading buffers from this stream and sending them through the camera
1386      * processing pipeline, as if the buffer was a newly captured image from the
1387      * imager.
1388      *
1389      * The pixel format for input stream can be any format reported by
1390      * android.scaler.availableInputOutputFormatsMap. The pixel format of the
1391      * output stream that is used to produce the reprocessing data may be any
1392      * format reported by android.scaler.availableStreamConfigurations. The
     * supported input/output stream combinations depend on the camera device
     * capabilities; see android.scaler.availableInputOutputFormatsMap for
1395      * stream map details.
1396      *
1397      * This kind of stream is generally used to reprocess data into higher
1398      * quality images (that otherwise would cause a frame rate performance
1399      * loss), or to do off-line reprocessing.
1400      *
1401      * CAMERA_DEVICE_API_VERSION_3_3:
1402      *    The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing,
1403      *    see S8.2, S8.3 and S10 for more details.
1404      */
1405     CAMERA3_STREAM_INPUT = 1,
1406 
1407     /**
1408      * This stream can be used for input and output. Typically, the stream is
1409      * used as an output stream, but occasionally one already-filled buffer may
1410      * be sent back to the HAL device for reprocessing.
1411      *
1412      * This kind of stream is meant generally for Zero Shutter Lag (ZSL)
1413      * features, where copying the captured image from the output buffer to the
1414      * reprocessing input buffer would be expensive. See S8.1 for more details.
1415      *
1416      * Note that the HAL will always be reprocessing data it produced.
1417      *
1418      */
1419     CAMERA3_STREAM_BIDIRECTIONAL = 2,
1420 
1421     /**
1422      * Total number of framework-defined stream types
1423      */
1424     CAMERA3_NUM_STREAM_TYPES
1425 
1426 } camera3_stream_type_t;
1427 
1428 /**
1429  * camera3_stream_rotation_t:
1430  *
1431  * The required counterclockwise rotation of camera stream.
1432  */
1433 typedef enum camera3_stream_rotation {
1434     /* No rotation */
1435     CAMERA3_STREAM_ROTATION_0 = 0,
1436 
    /* Rotate by 90 degrees counterclockwise */
    CAMERA3_STREAM_ROTATION_90 = 1,

    /* Rotate by 180 degrees counterclockwise */
    CAMERA3_STREAM_ROTATION_180 = 2,

    /* Rotate by 270 degrees counterclockwise */
    CAMERA3_STREAM_ROTATION_270 = 3
1445 } camera3_stream_rotation_t;
1446 
1447 /**
1448  * camera3_stream_configuration_mode_t:
1449  *
1450  * This defines the general operation mode for the HAL (for a given stream configuration), where
1451  * modes besides NORMAL have different semantics, and usually limit the generality of the API in
1452  * exchange for higher performance in some particular area.
1453  */
1454 typedef enum camera3_stream_configuration_mode {
1455     /**
1456      * Normal stream configuration operation mode. This is the default camera operation mode,
1457      * where all semantics of HAL APIs and metadata controls apply.
1458      */
1459     CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE = 0,
1460 
1461     /**
     * Special constrained high speed operation mode for devices that cannot support high
1463      * speed output in NORMAL mode. All streams in this configuration are operating at high speed
1464      * mode and have different characteristics and limitations to achieve high speed output.
1465      * The NORMAL mode can still be used for high speed output if the HAL can support high speed
1466      * output while satisfying all the semantics of HAL APIs and metadata controls. It is
1467      * recommended for the HAL to support high speed output in NORMAL mode (by advertising the high
1468      * speed FPS ranges in android.control.aeAvailableTargetFpsRanges) if possible.
1469      *
1470      * This mode has below limitations/requirements:
1471      *
1472      *   1. The HAL must support up to 2 streams with sizes reported by
1473      *      android.control.availableHighSpeedVideoConfigurations.
1474      *   2. In this mode, the HAL is expected to output up to 120fps or higher. This mode must
1475      *      support the targeted FPS range and size configurations reported by
1476      *      android.control.availableHighSpeedVideoConfigurations.
1477      *   3. The HAL must support HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream format.
     *   4. To achieve efficient high speed streaming, the HAL may have to aggregate
     *      multiple frames together and send them to the camera device for processing, where the
     *      request controls are the same for all the frames in this batch (batch mode). The HAL
     *      must support the max batch size, and the max batch size requirements are defined by
     *      android.control.availableHighSpeedVideoConfigurations.
1483      *   5. In this mode, the HAL must override aeMode, awbMode, and afMode to ON, ON, and
1484      *      CONTINUOUS_VIDEO, respectively. All post-processing block mode controls must be
1485      *      overridden to be FAST. Therefore, no manual control of capture and post-processing
1486      *      parameters is possible. All other controls operate the same as when
1487      *      android.control.mode == AUTO. This means that all other android.control.* fields
1488      *      must continue to work, such as
1489      *
1490      *      android.control.aeTargetFpsRange
1491      *      android.control.aeExposureCompensation
1492      *      android.control.aeLock
1493      *      android.control.awbLock
1494      *      android.control.effectMode
1495      *      android.control.aeRegions
1496      *      android.control.afRegions
1497      *      android.control.awbRegions
1498      *      android.control.afTrigger
1499      *      android.control.aePrecaptureTrigger
1500      *
1501      *      Outside of android.control.*, the following controls must work:
1502      *
1503      *      android.flash.mode (TORCH mode only, automatic flash for still capture will not work
1504      *      since aeMode is ON)
1505      *      android.lens.opticalStabilizationMode (if it is supported)
1506      *      android.scaler.cropRegion
1507      *      android.statistics.faceDetectMode (if it is supported)
1508      *   6. To reduce the amount of data passed across process boundaries at
1509      *      high frame rate, within one batch, camera framework only propagates
1510      *      the last shutter notify and the last capture results (including partial
1511      *      results and final result) to the app. The shutter notifies and capture
1512      *      results for the other requests in the batch are derived by
1513      *      the camera framework. As a result, the HAL can return empty metadata
1514      *      except for the last result in the batch.
1515      *
1516      * For more details about high speed stream requirements, see
1517      * android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
1518      * capability defined in android.request.availableCapabilities.
1519      *
1520      * This mode only needs to be supported by HALs that include CONSTRAINED_HIGH_SPEED_VIDEO in
1521      * the android.request.availableCapabilities static metadata.
1522      */
1523     CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
1524 
1525     /**
1526      * First value for vendor-defined stream configuration modes.
1527      */
1528     CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
1529 } camera3_stream_configuration_mode_t;
1530 
1531 /**
1532  * camera3_stream_t:
1533  *
1534  * A handle to a single camera input or output stream. A stream is defined by
1535  * the framework by its buffer resolution and format, and additionally by the
1536  * HAL with the gralloc usage flags and the maximum in-flight buffer count.
1537  *
1538  * The stream structures are owned by the framework, but pointers to a
1539  * camera3_stream passed into the HAL by configure_streams() are valid until the
1540  * end of the first subsequent configure_streams() call that _does not_ include
1541  * that camera3_stream as an argument, or until the end of the close() call.
1542  *
1543  * All camera3_stream framework-controlled members are immutable once the
1544  * camera3_stream is passed into configure_streams().  The HAL may only change
1545  * the HAL-controlled parameters during a configure_streams() call, except for
1546  * the contents of the private pointer.
1547  *
1548  * If a configure_streams() call returns a non-fatal error, all active streams
1549  * remain valid as if configure_streams() had not been called.
1550  *
1551  * The endpoint of the stream is not visible to the camera HAL device.
1552  * In DEVICE_API_VERSION_3_1, this was changed to share consumer usage flags
1553  * on streams where the camera is a producer (OUTPUT and BIDIRECTIONAL stream
 * types); see the usage field below.
1555  */
1556 typedef struct camera3_stream {
1557 
1558     /*****
1559      * Set by framework before configure_streams()
1560      */
1561 
1562     /**
1563      * The type of the stream, one of the camera3_stream_type_t values.
1564      */
1565     int stream_type;
1566 
1567     /**
1568      * The width in pixels of the buffers in this stream
1569      */
1570     uint32_t width;
1571 
1572     /**
1573      * The height in pixels of the buffers in this stream
1574      */
1575     uint32_t height;
1576 
1577     /**
1578      * The pixel format for the buffers in this stream. Format is a value from
1579      * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or
1580      * from device-specific headers.
1581      *
1582      * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform
1583      * gralloc module will select a format based on the usage flags provided by
1584      * the camera device and the other endpoint of the stream.
1585      *
1586      * <= CAMERA_DEVICE_API_VERSION_3_1:
1587      *
1588      * The camera HAL device must inspect the buffers handed to it in the
1589      * subsequent register_stream_buffers() call to obtain the
1590      * implementation-specific format details, if necessary.
1591      *
1592      * >= CAMERA_DEVICE_API_VERSION_3_2:
1593      *
1594      * register_stream_buffers() won't be called by the framework, so the HAL
1595      * should configure the ISP and sensor pipeline based purely on the sizes,
1596      * usage flags, and formats for the configured streams.
1597      */
1598     int format;
1599 
1600     /*****
1601      * Set by HAL during configure_streams().
1602      */
1603 
1604     /**
1605      * The gralloc usage flags for this stream, as needed by the HAL. The usage
1606      * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific
1607      * headers.
1608      *
1609      * For output streams, these are the HAL's producer usage flags. For input
1610      * streams, these are the HAL's consumer usage flags. The usage flags from
1611      * the producer and the consumer will be combined together and then passed
1612      * to the platform gralloc HAL module for allocating the gralloc buffers for
1613      * each stream.
1614      *
1615      * Version information:
1616      *
1617      * == CAMERA_DEVICE_API_VERSION_3_0:
1618      *
1619      *   No initial value guaranteed when passed via configure_streams().
1620      *   HAL may not use this field as input, and must write over this field
1621      *   with its usage flags.
1622      *
1623      * >= CAMERA_DEVICE_API_VERSION_3_1:
1624      *
1625      *   For stream_type OUTPUT and BIDIRECTIONAL, when passed via
1626      *   configure_streams(), the initial value of this is the consumer's
1627      *   usage flags.  The HAL may use these consumer flags to decide stream
1628      *   configuration.
1629      *   For stream_type INPUT, when passed via configure_streams(), the initial
1630      *   value of this is 0.
1631      *   For all streams passed via configure_streams(), the HAL must write
1632      *   over this field with its usage flags.
1633      *
1634      *   From Android O, the usage flag for an output stream may be bitwise
1635      *   combination of usage flags for multiple consumers, for the purpose of
     *   sharing one camera stream between those consumers. The HAL must fail the
     *   configure_streams call with -EINVAL if the combined flags cannot be
     *   supported due to an incompatible buffer format, dataSpace, or other hardware
1639      *   limitations.
1640      */
1641     uint32_t usage;
1642 
1643     /**
1644      * The maximum number of buffers the HAL device may need to have dequeued at
1645      * the same time. The HAL device may not have more buffers in-flight from
1646      * this stream than this value.
1647      */
1648     uint32_t max_buffers;
1649 
1650     /**
1651      * A handle to HAL-private information for the stream. Will not be inspected
1652      * by the framework code.
1653      */
1654     void *priv;
1655 
1656     /**
1657      * A field that describes the contents of the buffer. The format and buffer
1658      * dimensions define the memory layout and structure of the stream buffers,
1659      * while dataSpace defines the meaning of the data within the buffer.
1660      *
1661      * For most formats, dataSpace defines the color space of the image data.
1662      * In addition, for some formats, dataSpace indicates whether image- or
1663      * depth-based data is requested.  See system/core/include/system/graphics.h
1664      * for details of formats and valid dataSpace values for each format.
1665      *
1666      * Version information:
1667      *
1668      * < CAMERA_DEVICE_API_VERSION_3_3:
1669      *
1670      *   Not defined and should not be accessed. dataSpace should be assumed to
1671      *   be HAL_DATASPACE_UNKNOWN, and the appropriate color space, etc, should
1672      *   be determined from the usage flags and the format.
1673      *
1674      * = CAMERA_DEVICE_API_VERSION_3_3:
1675      *
1676      *   Always set by the camera service. HAL must use this dataSpace to
1677      *   configure the stream to the correct colorspace, or to select between
1678      *   color and depth outputs if supported. The dataspace values are the
1679      *   legacy definitions in graphics.h
1680      *
1681      * >= CAMERA_DEVICE_API_VERSION_3_4:
1682      *
1683      *   Always set by the camera service. HAL must use this dataSpace to
1684      *   configure the stream to the correct colorspace, or to select between
1685      *   color and depth outputs if supported. The dataspace values are set
1686      *   using the V0 dataspace definitions in graphics.h
1687      */
1688     android_dataspace_t data_space;
1689 
1690     /**
1691      * The required output rotation of the stream, one of
     * the camera3_stream_rotation_t values. This must be inspected by the HAL along
     * with the stream width and height. For example, if the rotation is 90 degrees
     * and the stream width and height are 720 and 1280 respectively, the camera service
     * will supply buffers of size 720x1280, and the HAL should capture a 1280x720 image
     * and rotate the image by 90 degrees counterclockwise. The rotation field is a
     * no-op when the stream type is input. The camera HAL must ignore the rotation
     * field for an input stream.
1699      *
1700      * <= CAMERA_DEVICE_API_VERSION_3_2:
1701      *
1702      *    Not defined and must not be accessed. HAL must not apply any rotation
1703      *    on output images.
1704      *
1705      * >= CAMERA_DEVICE_API_VERSION_3_3:
1706      *
     *    Always set by the camera service. The HAL must inspect this field during stream
     *    configuration and return -EINVAL if the HAL cannot perform such rotation.
1709      *    HAL must always support CAMERA3_STREAM_ROTATION_0, so a
1710      *    configure_streams() call must not fail for unsupported rotation if
1711      *    rotation field of all streams is CAMERA3_STREAM_ROTATION_0.
1712      *
1713      */
1714     int rotation;
1715 
1716     /**
1717      * The physical camera id this stream belongs to.
1718      *
     * <= CAMERA_DEVICE_API_VERSION_3_4:
1720      *
1721      *    Not defined and must not be accessed.
1722      *
     * >= CAMERA_DEVICE_API_VERSION_3_5:
1724      *
1725      *    Always set by camera service. If the camera device is not a logical
1726      *    multi camera, or if the camera is a logical multi camera but the stream
1727      *    is not a physical output stream, this field will point to a 0-length
1728      *    string.
1729      *
1730      *    A logical multi camera is a camera device backed by multiple physical
1731      *    cameras that are also exposed to the application. And for a logical
1732      *    multi camera, a physical output stream is an output stream specifically
1733      *    requested on an underlying physical camera.
1734      *
1735      *    For an input stream, this field is guaranteed to be a 0-length string.
1736      */
1737     const char* physical_camera_id;
1738 
1739     /* reserved for future use */
1740     void *reserved[6];
1741 
1742 } camera3_stream_t;
1743 
1744 /**
1745  * camera3_stream_configuration_t:
1746  *
1747  * A structure of stream definitions, used by configure_streams(). This
1748  * structure defines all the output streams and the reprocessing input
1749  * stream for the current camera use case.
1750  */
1751 typedef struct camera3_stream_configuration {
1752     /**
1753      * The total number of streams requested by the framework.  This includes
1754      * both input and output streams. The number of streams will be at least 1,
1755      * and there will be at least one output-capable stream.
1756      */
1757     uint32_t num_streams;
1758 
1759     /**
1760      * An array of camera stream pointers, defining the input/output
1761      * configuration for the camera HAL device.
1762      *
1763      * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL)
1764      * in a single configuration.
1765      *
1766      * At least one output-capable stream must be defined (OUTPUT or
1767      * BIDIRECTIONAL).
1768      */
1769     camera3_stream_t **streams;
1770 
1771     /**
1772      * >= CAMERA_DEVICE_API_VERSION_3_3:
1773      *
1774      * The operation mode of streams in this configuration, one of the value
1775      * defined in camera3_stream_configuration_mode_t.  The HAL can use this
1776      * mode as an indicator to set the stream property (e.g.,
1777      * camera3_stream->max_buffers) appropriately. For example, if the
1778      * configuration is
1779      * CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE, the HAL may
1780      * want to set aside more buffers for batch mode operation (see
1781      * android.control.availableHighSpeedVideoConfigurations for batch mode
1782      * definition).
1783      *
1784      */
1785     uint32_t operation_mode;
1786 
1787     /**
1788      * >= CAMERA_DEVICE_API_VERSION_3_5:
1789      *
1790      * The session metadata buffer contains the initial values of
1791      * ANDROID_REQUEST_AVAILABLE_SESSION_KEYS. This field is optional
1792      * and camera clients can choose to ignore it, in which case it will
     * be set to NULL. If parameters are present, then the HAL should examine
1794      * the parameter values and configure its internal camera pipeline
1795      * accordingly.
1796      */
1797     const camera_metadata_t *session_parameters;
1798 } camera3_stream_configuration_t;
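
/*
 * A rough, non-normative sketch of how a HAL implementation might walk a
 * camera3_stream_configuration_t inside configure_streams() and fill in the
 * HAL-controlled fields of each camera3_stream_t. The usage flags and buffer
 * count chosen here are arbitrary examples; the framework later combines the
 * HAL's usage flags with the consumer's when allocating gralloc buffers.
 *
 *   static int example_configure_streams(camera3_stream_configuration_t *config) {
 *       for (uint32_t i = 0; i < config->num_streams; i++) {
 *           camera3_stream_t *s = config->streams[i];
 *           if (s->stream_type == CAMERA3_STREAM_INPUT) {
 *               s->usage = GRALLOC_USAGE_HW_CAMERA_READ;   // HAL is the consumer
 *           } else {
 *               s->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;  // HAL is the producer
 *           }
 *           s->max_buffers = 4;   // example in-flight buffer budget
 *       }
 *       return 0;
 *   }
 */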
1799 
1800 /**
1801  * camera3_buffer_status_t:
1802  *
1803  * The current status of a single stream buffer.
1804  */
1805 typedef enum camera3_buffer_status {
1806     /**
1807      * The buffer is in a normal state, and can be used after waiting on its
1808      * sync fence.
1809      */
1810     CAMERA3_BUFFER_STATUS_OK = 0,
1811 
1812     /**
1813      * The buffer does not contain valid data, and the data in it should not be
1814      * used. The sync fence must still be waited on before reusing the buffer.
1815      */
1816     CAMERA3_BUFFER_STATUS_ERROR = 1
1817 
1818 } camera3_buffer_status_t;
1819 
1820 /**
1821  * camera3_stream_buffer_t:
1822  *
1823  * A single buffer from a camera3 stream. It includes a handle to its parent
1824  * stream, the handle to the gralloc buffer itself, and sync fences
1825  *
1826  * The buffer does not specify whether it is to be used for input or output;
1827  * that is determined by its parent stream type and how the buffer is passed to
1828  * the HAL device.
1829  */
1830 typedef struct camera3_stream_buffer {
1831     /**
1832      * The handle of the stream this buffer is associated with
1833      */
1834     camera3_stream_t *stream;
1835 
1836     /**
1837      * The native handle to the buffer
1838      */
1839     buffer_handle_t *buffer;
1840 
1841     /**
1842      * Current state of the buffer, one of the camera3_buffer_status_t
1843      * values. The framework will not pass buffers to the HAL that are in an
1844      * error state. In case a buffer could not be filled by the HAL, it must
1845      * have its status set to CAMERA3_BUFFER_STATUS_ERROR when returned to the
1846      * framework with process_capture_result().
1847      */
1848     int status;
1849 
1850     /**
1851      * The acquire sync fence for this buffer. The HAL must wait on this fence
1852      * fd before attempting to read from or write to this buffer.
1853      *
     * The framework may set this to -1 to indicate that no waiting is necessary
     * for this buffer.
1856      *
1857      * When the HAL returns an output buffer to the framework with
1858      * process_capture_result(), the acquire_fence must be set to -1. If the HAL
1859      * never waits on the acquire_fence due to an error in filling a buffer,
1860      * when calling process_capture_result() the HAL must set the release_fence
1861      * of the buffer to be the acquire_fence passed to it by the framework. This
1862      * will allow the framework to wait on the fence before reusing the buffer.
1863      *
1864      * For input buffers, the HAL must not change the acquire_fence field during
1865      * the process_capture_request() call.
1866      *
1867      * >= CAMERA_DEVICE_API_VERSION_3_2:
1868      *
1869      * When the HAL returns an input buffer to the framework with
1870      * process_capture_result(), the acquire_fence must be set to -1. If the HAL
1871      * never waits on input buffer acquire fence due to an error, the sync
1872      * fences should be handled similarly to the way they are handled for output
1873      * buffers.
1874      */
1875      int acquire_fence;
1876 
1877     /**
1878      * The release sync fence for this buffer. The HAL must set this fence when
1879      * returning buffers to the framework, or write -1 to indicate that no
1880      * waiting is required for this buffer.
1881      *
1882      * For the output buffers, the fences must be set in the output_buffers
1883      * array passed to process_capture_result().
1884      *
1885      * <= CAMERA_DEVICE_API_VERSION_3_1:
1886      *
1887      * For the input buffer, the release fence must be set by the
1888      * process_capture_request() call.
1889      *
1890      * >= CAMERA_DEVICE_API_VERSION_3_2:
1891      *
1892      * For the input buffer, the fences must be set in the input_buffer
1893      * passed to process_capture_result().
1894      *
1895      * After signaling the release_fence for this buffer, the HAL
1896      * should not make any further attempts to access this buffer as the
1897      * ownership has been fully transferred back to the framework.
1898      *
1899      * If a fence of -1 was specified then the ownership of this buffer
1900      * is transferred back immediately upon the call of process_capture_result.
1901      */
1902     int release_fence;
1903 
1904 } camera3_stream_buffer_t;
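
/*
 * A rough, non-normative sketch of the acquire/release fence rules above as
 * they might appear in a HAL implementation. sync_wait() is from libsync
 * (<sync/sync.h>), close() from <unistd.h>, errno from <errno.h>; the helper
 * name and the 5-second timeout are arbitrary.
 *
 *   static int example_acquire_buffer(camera3_stream_buffer_t *buf) {
 *       if (buf->acquire_fence != -1) {
 *           if (sync_wait(buf->acquire_fence, 5000) != 0) {
 *               // Never successfully waited: hand the acquire fence back as
 *               // the release fence and mark the buffer as failed.
 *               buf->status        = CAMERA3_BUFFER_STATUS_ERROR;
 *               buf->release_fence = buf->acquire_fence;
 *               buf->acquire_fence = -1;
 *               return -errno;
 *           }
 *           close(buf->acquire_fence);
 *           buf->acquire_fence = -1;      // must be -1 when returned
 *       }
 *       // ... fill the buffer; on success set release_fence to -1, or to a
 *       // fence that signals once the HAL's writes have completed.
 *       return 0;
 *   }
 */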
1905 
1906 /**
1907  * camera3_stream_buffer_set_t:
1908  *
1909  * The complete set of gralloc buffers for a stream. This structure is given to
1910  * register_stream_buffers() to allow the camera HAL device to register/map/etc
1911  * newly allocated stream buffers.
1912  *
1913  * >= CAMERA_DEVICE_API_VERSION_3_2:
1914  *
1915  * Deprecated (and not used). In particular,
1916  * register_stream_buffers is also deprecated and will never be invoked.
1917  *
1918  */
1919 typedef struct camera3_stream_buffer_set {
1920     /**
1921      * The stream handle for the stream these buffers belong to
1922      */
1923     camera3_stream_t *stream;
1924 
1925     /**
1926      * The number of buffers in this stream. It is guaranteed to be at least
1927      * stream->max_buffers.
1928      */
1929     uint32_t num_buffers;
1930 
1931     /**
1932      * The array of gralloc buffer handles for this stream. If the stream format
1933      * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device
1934      * should inspect the passed-in buffers to determine any platform-private
1935      * pixel format information.
1936      */
1937     buffer_handle_t **buffers;
1938 
1939 } camera3_stream_buffer_set_t;
1940 
1941 /**
1942  * camera3_jpeg_blob:
1943  *
1944  * Transport header for compressed JPEG buffers in output streams.
1945  *
1946  * To capture JPEG images, a stream is created using the pixel format
1947  * HAL_PIXEL_FORMAT_BLOB. The buffer size for the stream is calculated by the
1948  * framework, based on the static metadata field android.jpeg.maxSize. Since
1949  * compressed JPEG images are of variable size, the HAL needs to include the
1950  * final size of the compressed image using this structure inside the output
1951  * stream buffer. The JPEG blob ID field must be set to CAMERA3_JPEG_BLOB_ID.
1952  *
 * The transport header should be at the end of the JPEG output stream buffer. That
 * means the jpeg_blob_id must start at byte[buffer_size -
 * sizeof(camera3_jpeg_blob)], where buffer_size is the size of the gralloc buffer.
 * Any HAL using this transport header must account for it in android.jpeg.maxSize.
1957  * The JPEG data itself starts at the beginning of the buffer and should be
1958  * jpeg_size bytes long.
1959  */
1960 typedef struct camera3_jpeg_blob {
1961     uint16_t jpeg_blob_id;
1962     uint32_t jpeg_size;
1963 } camera3_jpeg_blob_t;
1964 
1965 enum {
1966     CAMERA3_JPEG_BLOB_ID = 0x00FF
1967 };
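
/*
 * A rough, non-normative sketch of how a HAL might append this transport
 * header after compressing a JPEG into a BLOB stream buffer. 'mapped' is a
 * hypothetical CPU mapping of the gralloc buffer and 'buffer_size' is its
 * allocated size (derived from android.jpeg.maxSize); memcpy() is from
 * <string.h>.
 *
 *   static void example_write_jpeg_blob(uint8_t *mapped, size_t buffer_size,
 *                                       uint32_t jpeg_size) {
 *       camera3_jpeg_blob_t blob;
 *       blob.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
 *       blob.jpeg_size    = jpeg_size;
 *       // The compressed JPEG occupies bytes [0, jpeg_size); the header sits
 *       // at the very end of the gralloc buffer.
 *       memcpy(mapped + buffer_size - sizeof(camera3_jpeg_blob_t), &blob,
 *              sizeof(blob));
 *   }
 */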
1968 
1969 /**********************************************************************
1970  *
1971  * Message definitions for the HAL notify() callback.
1972  *
1973  * These definitions are used for the HAL notify callback, to signal
1974  * asynchronous events from the HAL device to the Android framework.
1975  *
1976  */
1977 
1978 /**
1979  * camera3_msg_type:
1980  *
1981  * Indicates the type of message sent, which specifies which member of the
1982  * message union is valid.
1983  *
1984  */
1985 typedef enum camera3_msg_type {
1986     /**
1987      * An error has occurred. camera3_notify_msg.message.error contains the
1988      * error information.
1989      */
1990     CAMERA3_MSG_ERROR = 1,
1991 
1992     /**
     * The exposure of a given request, or the processing of a reprocess request,
     * has begun. camera3_notify_msg.message.shutter contains the information for
     * the capture.
1996      */
1997     CAMERA3_MSG_SHUTTER = 2,
1998 
1999     /**
2000      * Number of framework message types
2001      */
2002     CAMERA3_NUM_MESSAGES
2003 
2004 } camera3_msg_type_t;
2005 
2006 /**
 * Defined error codes for CAMERA3_MSG_ERROR
2008  */
2009 typedef enum camera3_error_msg_code {
2010     /**
     * A serious failure occurred. No further frames or buffer streams will
2012      * be produced by the device. Device should be treated as closed. The
2013      * client must reopen the device to use it again. The frame_number field
2014      * is unused.
2015      */
2016     CAMERA3_MSG_ERROR_DEVICE = 1,
2017 
2018     /**
2019      * An error has occurred in processing a request. No output (metadata or
2020      * buffers) will be produced for this request. The frame_number field
2021      * specifies which request has been dropped. Subsequent requests are
2022      * unaffected, and the device remains operational.
2023      */
2024     CAMERA3_MSG_ERROR_REQUEST = 2,
2025 
2026     /**
2027      * An error has occurred in producing an output result metadata buffer
2028      * for a request, but output stream buffers for it will still be
2029      * available. Subsequent requests are unaffected, and the device remains
2030      * operational.  The frame_number field specifies the request for which
2031      * result metadata won't be available.
2032      */
2033     CAMERA3_MSG_ERROR_RESULT = 3,
2034 
2035     /**
2036      * An error has occurred in placing an output buffer into a stream for a
2037      * request. The frame metadata and other buffers may still be
2038      * available. Subsequent requests are unaffected, and the device remains
2039      * operational. The frame_number field specifies the request for which the
2040      * buffer was dropped, and error_stream contains a pointer to the stream
2041      * that dropped the frame.
2042      */
2043     CAMERA3_MSG_ERROR_BUFFER = 4,
2044 
2045     /**
2046      * Number of error types
2047      */
2048     CAMERA3_MSG_NUM_ERRORS
2049 
2050 } camera3_error_msg_code_t;
2051 
2052 /**
2053  * camera3_error_msg_t:
2054  *
2055  * Message contents for CAMERA3_MSG_ERROR
2056  */
2057 typedef struct camera3_error_msg {
2058     /**
2059      * Frame number of the request the error applies to. 0 if the frame number
2060      * isn't applicable to the error.
2061      */
2062     uint32_t frame_number;
2063 
2064     /**
2065      * Pointer to the stream that had a failure. NULL if the stream isn't
2066      * applicable to the error.
2067      */
2068     camera3_stream_t *error_stream;
2069 
2070     /**
     * The code for this error; one of the camera3_error_msg_code_t enum values.
2072      */
2073     int error_code;
2074 
2075 } camera3_error_msg_t;
2076 
2077 /**
2078  * camera3_shutter_msg_t:
2079  *
2080  * Message contents for CAMERA3_MSG_SHUTTER
2081  */
2082 typedef struct camera3_shutter_msg {
2083     /**
2084      * Frame number of the request that has begun exposure or reprocessing.
2085      */
2086     uint32_t frame_number;
2087 
2088     /**
2089      * Timestamp for the start of capture. For a reprocess request, this must
     * be the input image's start of capture. This must match the capture result
2091      * metadata's sensor exposure start timestamp.
2092      */
2093     uint64_t timestamp;
2094 
2095 } camera3_shutter_msg_t;
2096 
2097 /**
2098  * camera3_notify_msg_t:
2099  *
2100  * The message structure sent to camera3_callback_ops_t.notify()
2101  */
2102 typedef struct camera3_notify_msg {
2103 
2104     /**
     * The message type. One of the camera3_msg_type_t values, or a private extension.
2106      */
2107     int type;
2108 
2109     union {
2110         /**
2111          * Error message contents. Valid if type is CAMERA3_MSG_ERROR
2112          */
2113         camera3_error_msg_t error;
2114 
2115         /**
2116          * Shutter message contents. Valid if type is CAMERA3_MSG_SHUTTER
2117          */
2118         camera3_shutter_msg_t shutter;
2119 
2120         /**
2121          * Generic message contents. Used to ensure a minimum size for custom
2122          * message types.
2123          */
2124         uint8_t generic[32];
2125     } message;
2126 
2127 } camera3_notify_msg_t;
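
/*
 * A rough, non-normative sketch of a HAL issuing the start-of-exposure
 * notification for a request. 'ops' is the camera3_callback_ops_t provided by
 * the framework in initialize() (defined later in this header); 'timestamp_ns'
 * is the sensor start-of-exposure time that must also appear in the capture
 * result metadata.
 *
 *   static void example_notify_shutter(const camera3_callback_ops_t *ops,
 *                                      uint32_t frame_number,
 *                                      uint64_t timestamp_ns) {
 *       camera3_notify_msg_t msg = { .type = CAMERA3_MSG_SHUTTER };
 *       msg.message.shutter.frame_number = frame_number;
 *       msg.message.shutter.timestamp    = timestamp_ns;
 *       ops->notify(ops, &msg);
 *   }
 */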
2128 
2129 /**********************************************************************
2130  *
2131  * Capture request/result definitions for the HAL process_capture_request()
2132  * method, and the process_capture_result() callback.
2133  *
2134  */
2135 
2136 /**
2137  * camera3_request_template_t:
2138  *
2139  * Available template types for
2140  * camera3_device_ops.construct_default_request_settings()
2141  */
2142 typedef enum camera3_request_template {
2143     /**
2144      * Standard camera preview operation with 3A on auto.
2145      */
2146     CAMERA3_TEMPLATE_PREVIEW = 1,
2147 
2148     /**
2149      * Standard camera high-quality still capture with 3A and flash on auto.
2150      */
2151     CAMERA3_TEMPLATE_STILL_CAPTURE = 2,
2152 
2153     /**
2154      * Standard video recording plus preview with 3A on auto, torch off.
2155      */
2156     CAMERA3_TEMPLATE_VIDEO_RECORD = 3,
2157 
2158     /**
2159      * High-quality still capture while recording video. Application will
2160      * include preview, video record, and full-resolution YUV or JPEG streams in
2161      * request. Must not cause stuttering on video stream. 3A on auto.
2162      */
2163     CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4,
2164 
2165     /**
2166      * Zero-shutter-lag mode. Application will request preview and
2167      * full-resolution data for each frame, and reprocess it to JPEG when a
2168      * still image is requested by user. Settings should provide highest-quality
2169      * full-resolution images without compromising preview frame rate. 3A on
2170      * auto.
2171      */
2172     CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5,
2173 
2174     /**
2175      * A basic template for direct application control of capture
2176      * parameters. All automatic control is disabled (auto-exposure, auto-white
2177      * balance, auto-focus), and post-processing parameters are set to preview
2178      * quality. The manual capture parameters (exposure, sensitivity, etc.)
2179      * are set to reasonable defaults, but should be overridden by the
2180      * application depending on the intended use case.
2181      */
2182     CAMERA3_TEMPLATE_MANUAL = 6,
2183 
2184     /* Total number of templates */
2185     CAMERA3_TEMPLATE_COUNT,
2186 
2187     /**
2188      * First value for vendor-defined request templates
2189      */
2190     CAMERA3_VENDOR_TEMPLATE_START = 0x40000000
2191 
2192 } camera3_request_template_t;
2193 
2194 /**
2195  * camera3_capture_request_t:
2196  *
2197  * A single request for image capture/buffer reprocessing, sent to the Camera
2198  * HAL device by the framework in process_capture_request().
2199  *
2200  * The request contains the settings to be used for this capture, and the set of
2201  * output buffers to write the resulting image data in. It may optionally
2202  * contain an input buffer, in which case the request is for reprocessing that
2203  * input buffer instead of capturing a new image with the camera sensor. The
2204  * capture is identified by the frame_number.
2205  *
2206  * In response, the camera HAL device must send a camera3_capture_result
2207  * structure asynchronously to the framework, using the process_capture_result()
2208  * callback.
2209  */
2210 typedef struct camera3_capture_request {
2211     /**
2212      * The frame number is an incrementing integer set by the framework to
2213      * uniquely identify this capture. It needs to be returned in the result
2214      * call, and is also used to identify the request in asynchronous
2215      * notifications sent to camera3_callback_ops_t.notify().
2216      */
2217     uint32_t frame_number;
2218 
2219     /**
2220      * The settings buffer contains the capture and processing parameters for
2221      * the request. As a special case, a NULL settings buffer indicates that the
2222      * settings are identical to the most-recently submitted capture request. A
2223      * NULL buffer cannot be used as the first submitted request after a
2224      * configure_streams() call.
2225      */
2226     const camera_metadata_t *settings;
2227 
2228     /**
2229      * The input stream buffer to use for this request, if any.
2230      *
2231      * If input_buffer is NULL, then the request is for a new capture from the
2232      * imager. If input_buffer is valid, the request is for reprocessing the
2233      * image contained in input_buffer.
2234      *
2235      * In the latter case, the HAL must set the release_fence of the
2236      * input_buffer to a valid sync fence, or to -1 if the HAL does not support
2237      * sync, before process_capture_request() returns.
2238      *
2239      * The HAL is required to wait on the acquire sync fence of the input buffer
2240      * before accessing it.
2241      *
2242      * <= CAMERA_DEVICE_API_VERSION_3_1:
2243      *
2244      * Any input buffer included here will have been registered with the HAL
2245      * through register_stream_buffers() before its inclusion in a request.
2246      *
2247      * >= CAMERA_DEVICE_API_VERSION_3_2:
2248      *
2249      * The buffers will not have been pre-registered with the HAL.
2250      * Subsequent requests may reuse buffers, or provide entirely new buffers.
2251      */
2252     camera3_stream_buffer_t *input_buffer;
2253 
2254     /**
2255      * The number of output buffers for this capture request. Must be at least
2256      * 1.
2257      */
2258     uint32_t num_output_buffers;
2259 
2260     /**
2261      * An array of num_output_buffers stream buffers, to be filled with image
2262      * data from this capture/reprocess. The HAL must wait on the acquire fences
2263      * of each stream buffer before writing to them.
2264      *
2265      * The HAL takes ownership of the actual buffer_handle_t entries in
2266      * output_buffers; the framework does not access them until they are
2267      * returned in a camera3_capture_result_t.
2268      *
2269      * <= CAMERA_DEVICE_API_VERSION_3_1:
2270      *
     * All the buffers included here will have been registered with the HAL
2272      * through register_stream_buffers() before their inclusion in a request.
2273      *
2274      * >= CAMERA_DEVICE_API_VERSION_3_2:
2275      *
2276      * Any or all of the buffers included here may be brand new in this
     * request (having never before been seen by the HAL).
2278      */
2279     const camera3_stream_buffer_t *output_buffers;
2280 
2281     /**
     * <= CAMERA_DEVICE_API_VERSION_3_4:
     *
     *    Not defined and must not be accessed.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_5:
     *    The number of physical camera settings to be applied. If 'num_physcam_settings'
     *    equals 0, or a physical device is not included, the HAL must derive that
     *    physical device's settings from the default 'settings'.
2290      */
2291     uint32_t num_physcam_settings;
2292 
2293     /**
     * <= CAMERA_DEVICE_API_VERSION_3_4:
2295      *
2296      *    Not defined and must not be accessed.
2297      *
2298      * >= CAMERA_DEVICE_API_VERSION_3_5:
     *    The physical camera IDs. The array will contain 'num_physcam_settings'
     *    camera ID strings, one for each physical device that has specific settings.
     *    If any ID is invalid, process_capture_request() must fail and return
     *    -EINVAL.
2303      */
2304     const char **physcam_id;
2305 
2306     /**
     * <= CAMERA_DEVICE_API_VERSION_3_4:
2308      *
2309      *    Not defined and must not be accessed.
2310      *
2311      * >= CAMERA_DEVICE_API_VERSION_3_5:
     *    The capture settings for the physical cameras. The array will contain
     *    'num_physcam_settings' settings for individual physical devices. If the
     *    settings at any index are empty, process_capture_request() must fail
     *    and return -EINVAL.
2316      */
2317     const camera_metadata_t **physcam_settings;
2318 
2319 } camera3_capture_request_t;
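
/*
 * Illustrative sketch (not part of this API): a minimal validation pass a HAL
 * might run at the start of process_capture_request(), before copying the
 * fields it needs. The helper name and the 'seen_first_settings' flag are
 * assumptions (stdbool.h and errno.h assumed).
 *
 *   static int sample_validate_request(const camera3_capture_request_t *req,
 *                                      bool seen_first_settings) {
 *       if (req == NULL || req->num_output_buffers < 1 ||
 *               req->output_buffers == NULL) {
 *           return -EINVAL;
 *       }
 *       // NULL settings are only legal once an earlier request has supplied
 *       // settings after the most recent configure_streams() call.
 *       if (req->settings == NULL && !seen_first_settings) {
 *           return -EINVAL;
 *       }
 *       // >= CAMERA_DEVICE_API_VERSION_3_5: per-physical-camera settings, if
 *       // present, must come with valid IDs and non-empty settings buffers.
 *       for (uint32_t i = 0; i < req->num_physcam_settings; i++) {
 *           if (req->physcam_id[i] == NULL || req->physcam_settings[i] == NULL) {
 *               return -EINVAL;
 *           }
 *       }
 *       return 0;
 *   }
 */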
2320 
2321 /**
2322  * camera3_capture_result_t:
2323  *
2324  * The result of a single capture/reprocess by the camera HAL device. This is
2325  * sent to the framework asynchronously with process_capture_result(), in
2326  * response to a single capture request sent to the HAL with
2327  * process_capture_request(). Multiple process_capture_result() calls may be
2328  * performed by the HAL for each request.
2329  *
 * Each call for a given request carries the same frame number, and may contain
 * some subset of the output buffers, and/or the result metadata. The metadata
 * may only be provided once for a given frame number; all other calls must set
 * the result metadata to NULL.
2334  *
2335  * The result structure contains the output metadata from this capture, and the
2336  * set of output buffers that have been/will be filled for this capture. Each
2337  * output buffer may come with a release sync fence that the framework will wait
2338  * on before reading, in case the buffer has not yet been filled by the HAL.
2339  *
2340  * >= CAMERA_DEVICE_API_VERSION_3_2:
2341  *
 * The metadata may be provided multiple times for a single frame number. The
 * framework will accumulate the final result set by combining each partial
 * result into the total result set.
2345  *
2346  * If an input buffer is given in a request, the HAL must return it in one of
 * the process_capture_result calls, and that call may return just the input
 * buffer, with no metadata and no output buffers; the sync fences must be
 * handled the same way as for output buffers.
2350  *
2351  *
2352  * Performance considerations:
2353  *
 * Applications will also receive these partial results immediately, so sending
 * partial results is a highly recommended performance optimization: metadata
 * that is known early in the pipeline does not have to wait for the full
 * pipeline latency before being delivered.
 *
 * A typical use case is calculating the AF state halfway through the pipeline;
 * by sending the state back to the framework immediately, the apparent latency
 * of auto-focus updates is roughly halved, improving the perceived
 * responsiveness of auto-focus.
2362  *
2363  */
2364 typedef struct camera3_capture_result {
2365     /**
2366      * The frame number is an incrementing integer set by the framework in the
2367      * submitted request to uniquely identify this capture. It is also used to
2368      * identify the request in asynchronous notifications sent to
2369      * camera3_callback_ops_t.notify().
2370     */
2371     uint32_t frame_number;
2372 
2373     /**
2374      * The result metadata for this capture. This contains information about the
2375      * final capture parameters, the state of the capture and post-processing
2376      * hardware, the state of the 3A algorithms, if enabled, and the output of
2377      * any enabled statistics units.
2378      *
2379      * Only one call to process_capture_result() with a given frame_number may
2380      * include the result metadata. All other calls for the same frame_number
2381      * must set this to NULL.
2382      *
2383      * If there was an error producing the result metadata, result must be an
2384      * empty metadata buffer, and notify() must be called with ERROR_RESULT.
2385      *
2386      * >= CAMERA_DEVICE_API_VERSION_3_2:
2387      *
2388      * Multiple calls to process_capture_result() with a given frame_number
2389      * may include the result metadata.
2390      *
2391      * Partial metadata submitted should not include any metadata key returned
2392      * in a previous partial result for a given frame. Each new partial result
2393      * for that frame must also set a distinct partial_result value.
2394      *
2395      * If notify has been called with ERROR_RESULT, all further partial
2396      * results for that frame are ignored by the framework.
2397      */
2398     const camera_metadata_t *result;
2399 
2400     /**
2401      * The number of output buffers returned in this result structure. Must be
2402      * less than or equal to the matching capture request's count. If this is
2403      * less than the buffer count in the capture request, at least one more call
2404      * to process_capture_result with the same frame_number must be made, to
2405      * return the remaining output buffers to the framework. This may only be
2406      * zero if the structure includes valid result metadata or an input buffer
2407      * is returned in this result.
2408      */
2409     uint32_t num_output_buffers;
2410 
2411     /**
2412      * The handles for the output stream buffers for this capture. They may not
2413      * yet be filled at the time the HAL calls process_capture_result(); the
2414      * framework will wait on the release sync fences provided by the HAL before
2415      * reading the buffers.
2416      *
2417      * The HAL must set the stream buffer's release sync fence to a valid sync
2418      * fd, or to -1 if the buffer has already been filled.
2419      *
2420      * If the HAL encounters an error while processing the buffer, and the
2421      * buffer is not filled, the buffer's status field must be set to
2422      * CAMERA3_BUFFER_STATUS_ERROR. If the HAL did not wait on the acquire fence
2423      * before encountering the error, the acquire fence should be copied into
2424      * the release fence, to allow the framework to wait on the fence before
2425      * reusing the buffer.
2426      *
2427      * The acquire fence must be set to -1 for all output buffers.  If
2428      * num_output_buffers is zero, this may be NULL. In that case, at least one
2429      * more process_capture_result call must be made by the HAL to provide the
2430      * output buffers.
2431      *
2432      * When process_capture_result is called with a new buffer for a frame,
2433      * all previous frames' buffers for that corresponding stream must have been
2434      * already delivered (the fences need not have yet been signaled).
2435      *
2436      * >= CAMERA_DEVICE_API_VERSION_3_2:
2437      *
     * Gralloc buffers for a frame may be sent to the framework before the
     * corresponding SHUTTER notify() call.
2440      *
2441      * Performance considerations:
2442      *
2443      * Buffers delivered to the framework will not be dispatched to the
2444      * application layer until a start of exposure timestamp has been received
2445      * via a SHUTTER notify() call. It is highly recommended to
2446      * dispatch that call as early as possible.
2447      */
2448      const camera3_stream_buffer_t *output_buffers;
2449 
2450      /**
2451       * >= CAMERA_DEVICE_API_VERSION_3_2:
2452       *
2453       * The handle for the input stream buffer for this capture. It may not
2454       * yet be consumed at the time the HAL calls process_capture_result(); the
2455       * framework will wait on the release sync fences provided by the HAL before
2456       * reusing the buffer.
2457       *
      * The HAL should handle the sync fences the same way it does for
      * output_buffers.
2460       *
2461       * Only one input buffer is allowed to be sent per request. Similarly to
2462       * output buffers, the ordering of returned input buffers must be
2463       * maintained by the HAL.
2464       *
2465       * Performance considerations:
2466       *
2467       * The input buffer should be returned as early as possible. If the HAL
2468       * supports sync fences, it can call process_capture_result to hand it back
2469       * with sync fences being set appropriately. If the sync fences are not
2470       * supported, the buffer can only be returned when it is consumed, which
      * may take a long time; the HAL may choose to copy this input buffer to make
2472       * the buffer return sooner.
2473       */
2474       const camera3_stream_buffer_t *input_buffer;
2475 
2476      /**
2477       * >= CAMERA_DEVICE_API_VERSION_3_2:
2478       *
2479       * In order to take advantage of partial results, the HAL must set the
2480       * static metadata android.request.partialResultCount to the number of
2481       * partial results it will send for each frame.
2482       *
2483       * Each new capture result with a partial result must set
2484       * this field (partial_result) to a distinct inclusive value between
2485       * 1 and android.request.partialResultCount.
2486       *
      * HALs not wishing to take advantage of this feature must not set
      * android.request.partialResultCount or partial_result to a value
      * other than 1.
2490       *
2491       * This value must be set to 0 when a capture result contains buffers only
2492       * and no metadata.
2493       */
2494      uint32_t partial_result;
2495 
2496      /**
2497       * >= CAMERA_DEVICE_API_VERSION_3_5:
2498       *
2499       * Specifies the number of physical camera metadata this capture result
      * contains. It must be equal to the number of physical cameras for which
      * results were requested.
2502       *
2503       * If the current camera device is not a logical multi-camera, or the
2504       * corresponding capture_request doesn't request on any physical camera,
2505       * this field must be 0.
2506       */
2507      uint32_t num_physcam_metadata;
2508 
2509      /**
2510       * >= CAMERA_DEVICE_API_VERSION_3_5:
2511       *
2512       * An array of strings containing the physical camera ids for the returned
2513       * physical camera metadata. The length of the array is
2514       * num_physcam_metadata.
2515       */
2516      const char **physcam_ids;
2517 
2518      /**
2519       * >= CAMERA_DEVICE_API_VERSION_3_5:
2520       *
      * The array of physical camera metadata for the physical cameras for which
      * results were requested. This array should have a 1-to-1 mapping with the
2523       * physcam_ids. The length of the array is num_physcam_metadata.
2524       */
2525      const camera_metadata_t **physcam_metadata;
2526 
2527 } camera3_capture_result_t;
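
/*
 * Illustrative sketch (not part of this API): reporting an early, metadata-only
 * partial result for a frame; buffers and the final metadata follow in later
 * process_capture_result() calls. The helper name is an assumption, and
 * 'partial_3a' stands in for a metadata buffer holding only the keys known so
 * far (string.h assumed for memset).
 *
 *   static void sample_send_partial_result(const camera3_callback_ops_t *cb,
 *                                          uint32_t frame_number,
 *                                          const camera_metadata_t *partial_3a) {
 *       camera3_capture_result_t result;
 *       memset(&result, 0, sizeof(result));
 *       result.frame_number = frame_number;
 *       result.result = partial_3a;      // only the keys known so far
 *       result.num_output_buffers = 0;   // buffers are returned later
 *       result.output_buffers = NULL;
 *       result.input_buffer = NULL;
 *       result.partial_result = 1;       // 1 .. android.request.partialResultCount
 *       cb->process_capture_result(cb, &result);
 *   }
 */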
2528 
2529 /**********************************************************************
2530  *
2531  * Callback methods for the HAL to call into the framework.
2532  *
 * These methods are used to return metadata and image buffers for completed
 * or failed captures, and to notify the framework of asynchronous events such
2535  * as errors.
2536  *
2537  * The framework will not call back into the HAL from within these callbacks,
2538  * and these calls will not block for extended periods.
2539  *
2540  */
2541 typedef struct camera3_callback_ops {
2542 
2543     /**
2544      * process_capture_result:
2545      *
2546      * Send results from a completed capture to the framework.
2547      * process_capture_result() may be invoked multiple times by the HAL in
2548      * response to a single capture request. This allows, for example, the
2549      * metadata and low-resolution buffers to be returned in one call, and
     * post-processed JPEG buffers in a later call, once they are available. Each
2551      * call must include the frame number of the request it is returning
2552      * metadata or buffers for.
2553      *
2554      * A component (buffer or metadata) of the complete result may only be
2555      * included in one process_capture_result call. A buffer for each stream,
2556      * and the result metadata, must be returned by the HAL for each request in
2557      * one of the process_capture_result calls, even in case of errors producing
2558      * some of the output. A call to process_capture_result() with neither
     * output buffers nor result metadata is not allowed.
2560      *
2561      * The order of returning metadata and buffers for a single result does not
2562      * matter, but buffers for a given stream must be returned in FIFO order. So
2563      * the buffer for request 5 for stream A must always be returned before the
2564      * buffer for request 6 for stream A. This also applies to the result
2565      * metadata; the metadata for request 5 must be returned before the metadata
2566      * for request 6.
2567      *
2568      * However, different streams are independent of each other, so it is
2569      * acceptable and expected that the buffer for request 5 for stream A may be
2570      * returned after the buffer for request 6 for stream B is. And it is
     * acceptable that the result metadata for request 6 is returned before the
     * buffer for request 5 for stream A is.
2573      *
     * The HAL retains ownership of the result structure, which only needs to be
2575      * valid to access during this call. The framework will copy whatever it
2576      * needs before this call returns.
2577      *
2578      * The output buffers do not need to be filled yet; the framework will wait
2579      * on the stream buffer release sync fence before reading the buffer
2580      * data. Therefore, this method should be called by the HAL as soon as
     * possible, even if some or all of the output buffers are still being
     * filled. The HAL must include valid release sync fences into each
2583      * output_buffers stream buffer entry, or -1 if that stream buffer is
2584      * already filled.
2585      *
2586      * If the result buffer cannot be constructed for a request, the HAL should
2587      * return an empty metadata buffer, but still provide the output buffers and
2588      * their sync fences. In addition, notify() must be called with an
2589      * ERROR_RESULT message.
2590      *
2591      * If an output buffer cannot be filled, its status field must be set to
     * STATUS_ERROR. In addition, notify() must be called with an ERROR_BUFFER
2593      * message.
2594      *
2595      * If the entire capture has failed, then this method still needs to be
2596      * called to return the output buffers to the framework. All the buffer
2597      * statuses should be STATUS_ERROR, and the result metadata should be an
     * empty buffer. In addition, notify() must be called with an ERROR_REQUEST
2599      * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages
2600      * should not be sent.
2601      *
2602      * Performance requirements:
2603      *
     * This is a non-blocking call. The framework will return from this call in 5ms.
2605      *
2606      * The pipeline latency (see S7 for definition) should be less than or equal to
2607      * 4 frame intervals, and must be less than or equal to 8 frame intervals.
2608      *
2609      */
2610     void (*process_capture_result)(const struct camera3_callback_ops *,
2611             const camera3_capture_result_t *result);
2612 
2613     /**
2614      * notify:
2615      *
2616      * Asynchronous notification callback from the HAL, fired for various
     * reasons. Only for information independent of frame capture, or that
     * requires specific timing. The ownership of the message structure remains
2619      * with the HAL, and the msg only needs to be valid for the duration of this
2620      * call.
2621      *
2622      * Multiple threads may call notify() simultaneously.
2623      *
2624      * <= CAMERA_DEVICE_API_VERSION_3_1:
2625      *
2626      * The notification for the start of exposure for a given request must be
2627      * sent by the HAL before the first call to process_capture_result() for
2628      * that request is made.
2629      *
2630      * >= CAMERA_DEVICE_API_VERSION_3_2:
2631      *
2632      * Buffers delivered to the framework will not be dispatched to the
2633      * application layer until a start of exposure timestamp (or input image's
2634      * start of exposure timestamp for a reprocess request) has been received
2635      * via a SHUTTER notify() call. It is highly recommended to dispatch this
2636      * call as early as possible.
2637      *
2638      * ------------------------------------------------------------------------
2639      * Performance requirements:
2640      *
     * This is a non-blocking call. The framework will return from this call in 5ms.
2642      */
2643     void (*notify)(const struct camera3_callback_ops *,
2644             const camera3_notify_msg_t *msg);
2645 
2646 } camera3_callback_ops_t;
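
/*
 * Illustrative sketch (not part of this API): sending the start-of-exposure
 * SHUTTER notification for a frame before any of its metadata or buffers are
 * returned. The helper name is an assumption; a real HAL reads the timestamp
 * from the sensor (string.h assumed for memset).
 *
 *   static void sample_notify_shutter(const camera3_callback_ops_t *cb,
 *                                     uint32_t frame_number,
 *                                     uint64_t start_of_exposure_ns) {
 *       camera3_notify_msg_t msg;
 *       memset(&msg, 0, sizeof(msg));
 *       msg.type = CAMERA3_MSG_SHUTTER;
 *       msg.message.shutter.frame_number = frame_number;
 *       msg.message.shutter.timestamp = start_of_exposure_ns;
 *       cb->notify(cb, &msg);
 *   }
 */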
2647 
2648 /**********************************************************************
2649  *
2650  * Camera device operations
2651  *
2652  */
2653 typedef struct camera3_device_ops {
2654 
2655     /**
2656      * initialize:
2657      *
2658      * One-time initialization to pass framework callback function pointers to
2659      * the HAL. Will be called once after a successful open() call, before any
2660      * other functions are called on the camera3_device_ops structure.
2661      *
2662      * Performance requirements:
2663      *
2664      * This should be a non-blocking call. The HAL should return from this call
2665      * in 5ms, and must return from this call in 10ms.
2666      *
2667      * Return values:
2668      *
2669      *  0:     On successful initialization
2670      *
2671      * -ENODEV: If initialization fails. Only close() can be called successfully
2672      *          by the framework after this.
2673      */
2674     int (*initialize)(const struct camera3_device *,
2675             const camera3_callback_ops_t *callback_ops);
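
    /*
     * Illustrative sketch (not part of this API): a typical initialize()
     * implementation only needs to save the framework's callback table. The
     * sample_camera_t wrapper, and the use of priv (assumed to be set to the
     * wrapper by the module's open() method), are assumptions of this sketch.
     *
     *   typedef struct sample_camera {
     *       camera3_device_t base;
     *       const camera3_callback_ops_t *callback_ops;
     *   } sample_camera_t;
     *
     *   static int sample_initialize(const struct camera3_device *dev,
     *                                const camera3_callback_ops_t *callback_ops) {
     *       sample_camera_t *cam = (sample_camera_t *)dev->priv;
     *       if (callback_ops == NULL) {
     *           return -ENODEV;
     *       }
     *       cam->callback_ops = callback_ops;
     *       return 0;
     *   }
     */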
2676 
2677     /**********************************************************************
2678      * Stream management
2679      */
2680 
2681     /**
2682      * configure_streams:
2683      *
2684      * CAMERA_DEVICE_API_VERSION_3_0 only:
2685      *
2686      * Reset the HAL camera device processing pipeline and set up new input and
2687      * output streams. This call replaces any existing stream configuration with
2688      * the streams defined in the stream_list. This method will be called at
2689      * least once after initialize() before a request is submitted with
2690      * process_capture_request().
2691      *
2692      * The stream_list must contain at least one output-capable stream, and may
2693      * not contain more than one input-capable stream.
2694      *
2695      * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_streams()). These
2697      * streams will already have valid values for usage, max_buffers, and the
2698      * private pointer.
2699      *
2700      * If such a stream has already had its buffers registered,
2701      * register_stream_buffers() will not be called again for the stream, and
2702      * buffers from the stream can be immediately included in input requests.
2703      *
2704      * If the HAL needs to change the stream configuration for an existing
2705      * stream due to the new configuration, it may rewrite the values of usage
2706      * and/or max_buffers during the configure call.
2707      *
2708      * The framework will detect such a change, and will then reallocate the
2709      * stream buffers, and call register_stream_buffers() again before using
2710      * buffers from that stream in a request.
2711      *
2712      * If a currently-active stream is not included in stream_list, the HAL may
2713      * safely remove any references to that stream. It will not be reused in a
2714      * later configure() call by the framework, and all the gralloc buffers for
2715      * it will be freed after the configure_streams() call returns.
2716      *
2717      * The stream_list structure is owned by the framework, and may not be
2718      * accessed once this call completes. The address of an individual
2719      * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_streams() call which no longer includes
2721      * that camera3_stream_t in the stream_list argument. The HAL may not change
2722      * values in the stream structure outside of the private pointer, except for
2723      * the usage and max_buffers members during the configure_streams() call
2724      * itself.
2725      *
     * If the stream is new, the usage, max_buffers, and private pointer fields
2727      * of the stream structure will all be set to 0. The HAL device must set
2728      * these fields before the configure_streams() call returns. These fields
2729      * are then used by the framework and the platform gralloc module to
2730      * allocate the gralloc buffers for each stream.
2731      *
2732      * Before such a new stream can have its buffers included in a capture
2733      * request, the framework will call register_stream_buffers() with that
2734      * stream. However, the framework is not required to register buffers for
2735      * _all_ streams before submitting a request. This allows for quick startup
2736      * of (for example) a preview stream, with allocation for other streams
2737      * happening later or concurrently.
2738      *
2739      * ------------------------------------------------------------------------
2740      * CAMERA_DEVICE_API_VERSION_3_1 only:
2741      *
2742      * Reset the HAL camera device processing pipeline and set up new input and
2743      * output streams. This call replaces any existing stream configuration with
2744      * the streams defined in the stream_list. This method will be called at
2745      * least once after initialize() before a request is submitted with
2746      * process_capture_request().
2747      *
2748      * The stream_list must contain at least one output-capable stream, and may
2749      * not contain more than one input-capable stream.
2750      *
2751      * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_streams()). These
2753      * streams will already have valid values for usage, max_buffers, and the
2754      * private pointer.
2755      *
2756      * If such a stream has already had its buffers registered,
2757      * register_stream_buffers() will not be called again for the stream, and
2758      * buffers from the stream can be immediately included in input requests.
2759      *
2760      * If the HAL needs to change the stream configuration for an existing
2761      * stream due to the new configuration, it may rewrite the values of usage
2762      * and/or max_buffers during the configure call.
2763      *
2764      * The framework will detect such a change, and will then reallocate the
2765      * stream buffers, and call register_stream_buffers() again before using
2766      * buffers from that stream in a request.
2767      *
2768      * If a currently-active stream is not included in stream_list, the HAL may
2769      * safely remove any references to that stream. It will not be reused in a
2770      * later configure() call by the framework, and all the gralloc buffers for
2771      * it will be freed after the configure_streams() call returns.
2772      *
2773      * The stream_list structure is owned by the framework, and may not be
2774      * accessed once this call completes. The address of an individual
2775      * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_streams() call which no longer includes
2777      * that camera3_stream_t in the stream_list argument. The HAL may not change
2778      * values in the stream structure outside of the private pointer, except for
2779      * the usage and max_buffers members during the configure_streams() call
2780      * itself.
2781      *
     * If the stream is new, the max_buffers and private pointer fields of the
     * stream structure will be set to 0. The usage will be set to the
2784      * consumer usage flags. The HAL device must set these fields before the
2785      * configure_streams() call returns. These fields are then used by the
2786      * framework and the platform gralloc module to allocate the gralloc
2787      * buffers for each stream.
2788      *
2789      * Before such a new stream can have its buffers included in a capture
2790      * request, the framework will call register_stream_buffers() with that
2791      * stream. However, the framework is not required to register buffers for
2792      * _all_ streams before submitting a request. This allows for quick startup
2793      * of (for example) a preview stream, with allocation for other streams
2794      * happening later or concurrently.
2795      *
2796      * ------------------------------------------------------------------------
2797      * >= CAMERA_DEVICE_API_VERSION_3_2:
2798      *
2799      * Reset the HAL camera device processing pipeline and set up new input and
2800      * output streams. This call replaces any existing stream configuration with
2801      * the streams defined in the stream_list. This method will be called at
2802      * least once after initialize() before a request is submitted with
2803      * process_capture_request().
2804      *
2805      * The stream_list must contain at least one output-capable stream, and may
2806      * not contain more than one input-capable stream.
2807      *
2808      * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_streams()). These
2810      * streams will already have valid values for usage, max_buffers, and the
2811      * private pointer.
2812      *
2813      * If the HAL needs to change the stream configuration for an existing
2814      * stream due to the new configuration, it may rewrite the values of usage
2815      * and/or max_buffers during the configure call.
2816      *
2817      * The framework will detect such a change, and may then reallocate the
2818      * stream buffers before using buffers from that stream in a request.
2819      *
2820      * If a currently-active stream is not included in stream_list, the HAL may
2821      * safely remove any references to that stream. It will not be reused in a
2822      * later configure() call by the framework, and all the gralloc buffers for
2823      * it will be freed after the configure_streams() call returns.
2824      *
2825      * The stream_list structure is owned by the framework, and may not be
2826      * accessed once this call completes. The address of an individual
2827      * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_streams() call which no longer includes
2829      * that camera3_stream_t in the stream_list argument. The HAL may not change
2830      * values in the stream structure outside of the private pointer, except for
2831      * the usage and max_buffers members during the configure_streams() call
2832      * itself.
2833      *
     * If the stream is new, the max_buffers and private pointer fields of the
     * stream structure will be set to 0. The usage will be set to the
2836      * consumer usage flags. The HAL device must set these fields before the
2837      * configure_streams() call returns. These fields are then used by the
2838      * framework and the platform gralloc module to allocate the gralloc
2839      * buffers for each stream.
2840      *
2841      * Newly allocated buffers may be included in a capture request at any time
2842      * by the framework. Once a gralloc buffer is returned to the framework
2843      * with process_capture_result (and its respective release_fence has been
2844      * signaled) the framework may free or reuse it at any time.
2845      *
2846      * ------------------------------------------------------------------------
2847      *
2848      * Preconditions:
2849      *
2850      * The framework will only call this method when no captures are being
2851      * processed. That is, all results have been returned to the framework, and
2852      * all in-flight input and output buffers have been returned and their
2853      * release sync fences have been signaled by the HAL. The framework will not
2854      * submit new requests for capture while the configure_streams() call is
2855      * underway.
2856      *
2857      * Postconditions:
2858      *
2859      * The HAL device must configure itself to provide maximum possible output
2860      * frame rate given the sizes and formats of the output streams, as
2861      * documented in the camera device's static metadata.
2862      *
2863      * Performance requirements:
2864      *
2865      * This call is expected to be heavyweight and possibly take several hundred
2866      * milliseconds to complete, since it may require resetting and
2867      * reconfiguring the image sensor and the camera processing pipeline.
2868      * Nevertheless, the HAL device should attempt to minimize the
2869      * reconfiguration delay to minimize the user-visible pauses during
2870      * application operational mode changes (such as switching from still
2871      * capture to video recording).
2872      *
2873      * The HAL should return from this call in 500ms, and must return from this
2874      * call in 1000ms.
2875      *
2876      * Return values:
2877      *
2878      *  0:      On successful stream configuration
2879      *
2880      * -EINVAL: If the requested stream configuration is invalid. Some examples
2881      *          of invalid stream configurations include:
2882      *
2883      *          - Including more than 1 input-capable stream (INPUT or
2884      *            BIDIRECTIONAL)
2885      *
2886      *          - Not including any output-capable streams (OUTPUT or
2887      *            BIDIRECTIONAL)
2888      *
2889      *          - Including streams with unsupported formats, or an unsupported
2890      *            size for that format.
2891      *
2892      *          - Including too many output streams of a certain format.
2893      *
2894      *          - Unsupported rotation configuration (only applies to
2895      *            devices with version >= CAMERA_DEVICE_API_VERSION_3_3)
2896      *
2897      *          - Stream sizes/formats don't satisfy the
2898      *            camera3_stream_configuration_t->operation_mode requirements for non-NORMAL mode,
2899      *            or the requested operation_mode is not supported by the HAL.
2900      *            (only applies to devices with version >= CAMERA_DEVICE_API_VERSION_3_3)
2901      *
2902      *          Note that the framework submitting an invalid stream
2903      *          configuration is not normal operation, since stream
2904      *          configurations are checked before configure. An invalid
2905      *          configuration means that a bug exists in the framework code, or
2906      *          there is a mismatch between the HAL's static metadata and the
2907      *          requirements on streams.
2908      *
2909      * -ENODEV: If there has been a fatal error and the device is no longer
2910      *          operational. Only close() can be called successfully by the
2911      *          framework after this error is returned.
2912      */
2913     int (*configure_streams)(const struct camera3_device *,
2914             camera3_stream_configuration_t *stream_list);
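
    /*
     * Illustrative sketch (not part of this API, >= CAMERA_DEVICE_API_VERSION_3_2):
     * the core obligation of configure_streams() is to fill in usage and
     * max_buffers for every stream. The gralloc producer flags and the buffer
     * count below are placeholders; a real HAL derives them from its pipeline
     * depth and hardware constraints (hardware/gralloc.h assumed for the
     * GRALLOC_USAGE_* flags).
     *
     *   static int sample_configure_streams(const struct camera3_device *dev,
     *                                       camera3_stream_configuration_t *list) {
     *       (void)dev;
     *       if (list == NULL || list->num_streams == 0 || list->streams == NULL) {
     *           return -EINVAL;
     *       }
     *       for (uint32_t i = 0; i < list->num_streams; i++) {
     *           camera3_stream_t *s = list->streams[i];
     *           if (s->stream_type == CAMERA3_STREAM_OUTPUT ||
     *                   s->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
     *               s->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;  // HAL produces
     *           }
     *           if (s->stream_type == CAMERA3_STREAM_INPUT ||
     *                   s->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
     *               s->usage |= GRALLOC_USAGE_HW_CAMERA_READ;   // HAL consumes
     *           }
     *           s->max_buffers = 4;  // placeholder pipeline depth
     *       }
     *       return 0;
     *   }
     */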
2915 
2916     /**
2917      * register_stream_buffers:
2918      *
2919      * >= CAMERA_DEVICE_API_VERSION_3_2:
2920      *
2921      * DEPRECATED. This will not be called and must be set to NULL.
2922      *
2923      * <= CAMERA_DEVICE_API_VERSION_3_1:
2924      *
2925      * Register buffers for a given stream with the HAL device. This method is
2926      * called by the framework after a new stream is defined by
2927      * configure_streams, and before buffers from that stream are included in a
2928      * capture request. If the same stream is listed in a subsequent
2929      * configure_streams() call, register_stream_buffers will _not_ be called
2930      * again for that stream.
2931      *
2932      * The framework does not need to register buffers for all configured
2933      * streams before it submits the first capture request. This allows quick
2934      * startup for preview (or similar use cases) while other streams are still
2935      * being allocated.
2936      *
2937      * This method is intended to allow the HAL device to map or otherwise
2938      * prepare the buffers for later use. The buffers passed in will already be
2939      * locked for use. At the end of the call, all the buffers must be ready to
2940      * be returned to the stream.  The buffer_set argument is only valid for the
2941      * duration of this call.
2942      *
2943      * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2944      * the camera HAL should inspect the passed-in buffers here to determine any
2945      * platform-private pixel format information.
2946      *
2947      * Performance requirements:
2948      *
2949      * This should be a non-blocking call. The HAL should return from this call
2950      * in 1ms, and must return from this call in 5ms.
2951      *
2952      * Return values:
2953      *
2954      *  0:      On successful registration of the new stream buffers
2955      *
2956      * -EINVAL: If the stream_buffer_set does not refer to a valid active
2957      *          stream, or if the buffers array is invalid.
2958      *
2959      * -ENOMEM: If there was a failure in registering the buffers. The framework
2960      *          must consider all the stream buffers to be unregistered, and can
2961      *          try to register again later.
2962      *
2963      * -ENODEV: If there is a fatal error, and the device is no longer
2964      *          operational. Only close() can be called successfully by the
2965      *          framework after this error is returned.
2966      */
2967     int (*register_stream_buffers)(const struct camera3_device *,
2968             const camera3_stream_buffer_set_t *buffer_set);
2969 
2970     /**********************************************************************
2971      * Request creation and submission
2972      */
2973 
2974     /**
2975      * construct_default_request_settings:
2976      *
2977      * Create capture settings for standard camera use cases.
2978      *
2979      * The device must return a settings buffer that is configured to meet the
2980      * requested use case, which must be one of the CAMERA3_TEMPLATE_*
2981      * enums. All request control fields must be included.
2982      *
2983      * The HAL retains ownership of this structure, but the pointer to the
2984      * structure must be valid until the device is closed. The framework and the
2985      * HAL may not modify the buffer once it is returned by this call. The same
2986      * buffer may be returned for subsequent calls for the same template, or for
2987      * other templates.
2988      *
2989      * Performance requirements:
2990      *
2991      * This should be a non-blocking call. The HAL should return from this call
2992      * in 1ms, and must return from this call in 5ms.
2993      *
2994      * Return values:
2995      *
2996      *   Valid metadata: On successful creation of a default settings
2997      *                   buffer.
2998      *
2999      *   NULL:           In case of a fatal error. After this is returned, only
3000      *                   the close() method can be called successfully by the
3001      *                   framework.
3002      */
3003     const camera_metadata_t* (*construct_default_request_settings)(
3004             const struct camera3_device *,
3005             int type);
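
    /*
     * Illustrative sketch (not part of this API): since the returned pointer
     * must remain valid until close(), a common pattern is to build each
     * template's defaults once and cache them. The sample_camera_t private
     * struct with a 'templates' array, and sample_build_template() (assumed to
     * allocate and populate every request control field for the use case), are
     * assumptions of this sketch.
     *
     *   static camera_metadata_t *sample_build_template(sample_camera_t *cam,
     *                                                   int type);  // assumed helper
     *
     *   static const camera_metadata_t *sample_construct_default_request_settings(
     *           const struct camera3_device *dev, int type) {
     *       sample_camera_t *cam = (sample_camera_t *)dev->priv;
     *       if (type < CAMERA3_TEMPLATE_PREVIEW || type >= CAMERA3_TEMPLATE_COUNT) {
     *           return NULL;  // not a known template in this sketch
     *       }
     *       if (cam->templates[type] == NULL) {
     *           cam->templates[type] = sample_build_template(cam, type);
     *       }
     *       return cam->templates[type];  // owned by the HAL until close()
     *   }
     */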
3006 
3007     /**
3008      * process_capture_request:
3009      *
3010      * Send a new capture request to the HAL. The HAL should not return from
3011      * this call until it is ready to accept the next request to process. Only
3012      * one call to process_capture_request() will be made at a time by the
3013      * framework, and the calls will all be from the same thread. The next call
3014      * to process_capture_request() will be made as soon as a new request and
3015      * its associated buffers are available. In a normal preview scenario, this
3016      * means the function will be called again by the framework almost
3017      * instantly.
3018      *
3019      * The actual request processing is asynchronous, with the results of
3020      * capture being returned by the HAL through the process_capture_result()
3021      * call. This call requires the result metadata to be available, but output
3022      * buffers may simply provide sync fences to wait on. Multiple requests are
3023      * expected to be in flight at once, to maintain full output frame rate.
3024      *
3025      * The framework retains ownership of the request structure. It is only
3026      * guaranteed to be valid during this call. The HAL device must make copies
3027      * of the information it needs to retain for the capture processing. The HAL
3028      * is responsible for waiting on and closing the buffers' fences and
3029      * returning the buffer handles to the framework.
3030      *
3031      * The HAL must write the file descriptor for the input buffer's release
3032      * sync fence into input_buffer->release_fence, if input_buffer is not
3033      * NULL. If the HAL returns -1 for the input buffer release sync fence, the
3034      * framework is free to immediately reuse the input buffer. Otherwise, the
3035      * framework will wait on the sync fence before refilling and reusing the
3036      * input buffer.
3037      *
3038      * >= CAMERA_DEVICE_API_VERSION_3_2:
3039      *
3040      * The input/output buffers provided by the framework in each request
     * may be brand new (having never before been seen by the HAL).
3042      *
3043      * ------------------------------------------------------------------------
3044      * Performance considerations:
3045      *
3046      * Handling a new buffer should be extremely lightweight and there should be
3047      * no frame rate degradation or frame jitter introduced.
3048      *
3049      * This call must return fast enough to ensure that the requested frame
3050      * rate can be sustained, especially for streaming cases (post-processing
     * quality settings set to FAST). The HAL should return from this call in 1
3052      * frame interval, and must return from this call in 4 frame intervals.
3053      *
3054      * Return values:
3055      *
3056      *  0:      On a successful start to processing the capture request
3057      *
3058      * -EINVAL: If the input is malformed (the settings are NULL when not
3059      *          allowed, invalid physical camera settings,
3060      *          there are 0 output buffers, etc) and capture processing
3061      *          cannot start. Failures during request processing should be
3062      *          handled by calling camera3_callback_ops_t.notify(). In case of
3063      *          this error, the framework will retain responsibility for the
3064      *          stream buffers' fences and the buffer handles; the HAL should
3065      *          not close the fences or return these buffers with
3066      *          process_capture_result.
3067      *
3068      * -ENODEV: If the camera device has encountered a serious error. After this
3069      *          error is returned, only the close() method can be successfully
3070      *          called by the framework.
3071      *
3072      */
3073     int (*process_capture_request)(const struct camera3_device *,
3074             camera3_capture_request_t *request);
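
    /*
     * Illustrative sketch (not part of this API): process_capture_request()
     * should only validate the request, deep-copy what the HAL needs, and
     * enqueue the work; capture and the process_capture_result() calls happen
     * on a separate thread. sample_camera_t (with its 'seen_settings' flag),
     * sample_validate_request() (see the sketch after camera3_capture_request_t
     * above), and sample_enqueue_capture() are assumptions of this sketch.
     *
     *   static int sample_process_capture_request(const struct camera3_device *dev,
     *                                             camera3_capture_request_t *request) {
     *       sample_camera_t *cam = (sample_camera_t *)dev->priv;
     *       int ret = sample_validate_request(request, cam->seen_settings);
     *       if (ret != 0) {
     *           // Framework keeps ownership of the buffers and their fences.
     *           return ret;
     *       }
     *       // The request struct is only valid for the duration of this call,
     *       // so copy the settings and stream buffer entries before returning.
     *       if (sample_enqueue_capture(cam, request) != 0) {
     *           return -ENODEV;
     *       }
     *       return 0;
     *   }
     */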
3075 
3076     /**********************************************************************
3077      * Miscellaneous methods
3078      */
3079 
3080     /**
3081      * get_metadata_vendor_tag_ops:
3082      *
3083      * Get methods to query for vendor extension metadata tag information. The
3084      * HAL should fill in all the vendor tag operation methods, or leave ops
3085      * unchanged if no vendor tags are defined.
3086      *
3087      * The definition of vendor_tag_query_ops_t can be found in
3088      * system/media/camera/include/system/camera_metadata.h.
3089      *
3090      * >= CAMERA_DEVICE_API_VERSION_3_2:
3091      *    DEPRECATED. This function has been deprecated and should be set to
3092      *    NULL by the HAL.  Please implement get_vendor_tag_ops in camera_common.h
3093      *    instead.
3094      */
3095     void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,
3096             vendor_tag_query_ops_t* ops);
3097 
3098     /**
3099      * dump:
3100      *
3101      * Print out debugging state for the camera device. This will be called by
3102      * the framework when the camera service is asked for a debug dump, which
3103      * happens when using the dumpsys tool, or when capturing a bugreport.
3104      *
3105      * The passed-in file descriptor can be used to write debugging text using
3106      * dprintf() or write(). The text should be in ASCII encoding only.
3107      *
3108      * Performance requirements:
3109      *
3110      * This must be a non-blocking call. The HAL should return from this call
     * in 1ms, and must return from this call in 10ms. This call must avoid
3112      * deadlocks, as it may be called at any point during camera operation.
3113      * Any synchronization primitives used (such as mutex locks or semaphores)
3114      * should be acquired with a timeout.
3115      */
3116     void (*dump)(const struct camera3_device *, int fd);
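
    /*
     * Illustrative sketch (not part of this API): dump() should only write
     * human-readable ASCII to fd, taking any locks with a timeout. The fields
     * printed from the assumed sample_camera_t state are placeholders
     * (stdio.h assumed for dprintf).
     *
     *   static void sample_dump(const struct camera3_device *dev, int fd) {
     *       sample_camera_t *cam = (sample_camera_t *)dev->priv;
     *       dprintf(fd, "Sample camera HAL state:\n");
     *       dprintf(fd, "  requests in flight: %u\n", cam->inflight_count);
     *       dprintf(fd, "  last frame number:  %u\n", cam->last_frame_number);
     *   }
     */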
3117 
3118     /**
3119      * flush:
3120      *
3121      * Flush all currently in-process captures and all buffers in the pipeline
3122      * on the given device. The framework will use this to dump all state as
3123      * quickly as possible in order to prepare for a configure_streams() call.
3124      *
3125      * No buffers are required to be successfully returned, so every buffer
3126      * held at the time of flush() (whether successfully filled or not) may be
3127      * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed
3128      * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call,
3129      * provided they are successfully filled.
3130      *
3131      * All requests currently in the HAL are expected to be returned as soon as
3132      * possible.  Not-in-process requests should return errors immediately. Any
3133      * interruptible hardware blocks should be stopped, and any uninterruptible
3134      * blocks should be waited on.
3135      *
3136      * flush() may be called concurrently to process_capture_request(), with the expectation that
3137      * process_capture_request will return quickly and the request submitted in that
3138      * process_capture_request call is treated like all other in-flight requests.  Due to
3139      * concurrency issues, it is possible that from the HAL's point of view, a
3140      * process_capture_request() call may be started after flush has been invoked but has not
3141      * returned yet. If such a call happens before flush() returns, the HAL should treat the new
3142      * capture request like other in-flight pending requests (see #4 below).
3143      *
     * More specifically, the HAL must follow the requirements below for the various cases:
3145      *
     * 1. Captures that are too late for the HAL to cancel/stop will be completed
     *    normally by the HAL; i.e. the HAL can send the shutter notify(),
     *    process_capture_result() calls, and buffers as normal.
3149      *
3150      * 2. For pending requests that have not done any processing, the HAL must call notify
3151      *    CAMERA3_MSG_ERROR_REQUEST, and return all the output buffers with
3152      *    process_capture_result in the error state (CAMERA3_BUFFER_STATUS_ERROR).
3153      *    The HAL must not place the release fence into an error state, instead,
3154      *    the release fences must be set to the acquire fences passed by the framework,
3155      *    or -1 if they have been waited on by the HAL already. This is also the path
3156      *    to follow for any captures for which the HAL already called notify() with
3157      *    CAMERA3_MSG_SHUTTER but won't be producing any metadata/valid buffers for.
3158      *    After CAMERA3_MSG_ERROR_REQUEST, for a given frame, only process_capture_results with
     *    buffers in CAMERA3_BUFFER_STATUS_ERROR are allowed. No further notify()
     *    calls or process_capture_result() calls with non-null metadata are allowed.
3161      *
     * 3. For partially completed pending requests that will be missing some of the
     *    output buffers or some of the result metadata, the HAL should do the following:
3164      *
3165      *    3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected result
3166      *    metadata (i.e. one or more partial metadata) won't be available for the capture.
3167      *
3168      *    3.2. Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that won't
3169      *         be produced for the capture.
3170      *
     *    3.3. Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp before
3172      *         any buffers/metadata are returned with process_capture_result.
3173      *
     *    3.4. For captures that will produce some results, the HAL must not call
3175      *        CAMERA3_MSG_ERROR_REQUEST, since that indicates complete failure.
3176      *
3177      *    3.5. Valid buffers/metadata should be passed to the framework as normal.
3178      *
3179      *    3.6. Failed buffers should be returned to the framework as described for case 2.
3180      *         But failed buffers do not have to follow the strict ordering valid buffers do,
3181      *         and may be out-of-order with respect to valid buffers. For example, if buffers
3182      *         A, B, C, D, E are sent, D and E are failed, then A, E, B, D, C is an acceptable
3183      *         return order.
3184      *
     *    3.7. For fully-missing metadata, calling notify with CAMERA3_MSG_ERROR_RESULT is
     *         sufficient; there is no need to call process_capture_result with NULL
     *         metadata or equivalent.
3187      *
3188      * 4. If a flush() is invoked while a process_capture_request() invocation is active, that
3189      *    process call should return as soon as possible. In addition, if a process_capture_request()
3190      *    call is made after flush() has been invoked but before flush() has returned, the
3191      *    capture request provided by the late process_capture_request call should be treated like
3192      *    a pending request in case #2 above.
3193      *
3194      * flush() should only return when there are no more outstanding buffers or
3195      * requests left in the HAL. The framework may call configure_streams (as
3196      * the HAL state is now quiesced) or may issue new requests.
3197      *
3198      * Note that it's sufficient to only support fully-succeeded and fully-failed result cases.
     * However, it is highly desirable to support the partial failure cases as well, as
     * doing so can improve the overall performance of the flush call.
3201      *
3202      * Performance requirements:
3203      *
3204      * The HAL should return from this call in 100ms, and must return from this
     * call in 1000ms. This call must not block for longer than the pipeline
     * latency (see S7 for definition).
3207      *
3208      * Version information:
3209      *
     *   Only available if device version >= CAMERA_DEVICE_API_VERSION_3_1.
3211      *
3212      * Return values:
3213      *
3214      *  0:      On a successful flush of the camera HAL.
3215      *
3216      * -EINVAL: If the input is malformed (the device is not valid).
3217      *
3218      * -ENODEV: If the camera device has encountered a serious error. After this
3219      *          error is returned, only the close() method can be successfully
3220      *          called by the framework.
3221      */
3222     int (*flush)(const struct camera3_device *);
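
    /*
     * Illustrative sketch (not part of this API): handling case #2 above during
     * flush() for one pending, unstarted request: notify ERROR_REQUEST, then
     * hand every output buffer back in the error state with the release fence
     * set to the not-yet-waited-on acquire fence. The helper name is an
     * assumption (string.h assumed for memset).
     *
     *   static void sample_fail_pending_request(const camera3_callback_ops_t *cb,
     *                                           uint32_t frame_number,
     *                                           camera3_stream_buffer_t *bufs,
     *                                           uint32_t count) {
     *       camera3_notify_msg_t msg;
     *       memset(&msg, 0, sizeof(msg));
     *       msg.type = CAMERA3_MSG_ERROR;
     *       msg.message.error.frame_number = frame_number;
     *       msg.message.error.error_stream = NULL;
     *       msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
     *       cb->notify(cb, &msg);
     *
     *       for (uint32_t i = 0; i < count; i++) {
     *           bufs[i].status = CAMERA3_BUFFER_STATUS_ERROR;
     *           bufs[i].release_fence = bufs[i].acquire_fence;  // not yet waited on
     *           bufs[i].acquire_fence = -1;
     *       }
     *
     *       camera3_capture_result_t result;
     *       memset(&result, 0, sizeof(result));
     *       result.frame_number = frame_number;
     *       result.num_output_buffers = count;
     *       result.output_buffers = bufs;
     *       cb->process_capture_result(cb, &result);
     *   }
     */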
3223 
3224     /* reserved for future use */
3225     void *reserved[8];
3226 } camera3_device_ops_t;
3227 
3228 /**********************************************************************
3229  *
3230  * Camera device definition
3231  *
3232  */
3233 typedef struct camera3_device {
3234     /**
     * common.version must equal the CAMERA_DEVICE_API_VERSION_3_x constant matching
     * the camera device HAL version this device implements
     * (CAMERA_DEVICE_API_VERSION_3_5 for the current version of the HAL).
3237      *
3238      * Performance requirements:
3239      *
3240      * Camera open (common.module->common.methods->open) should return in 200ms, and must return
3241      * in 500ms.
3242      * Camera close (common.close) should return in 200ms, and must return in 500ms.
3243      *
3244      */
3245     hw_device_t common;
3246     camera3_device_ops_t *ops;
3247     void *priv;
3248 } camera3_device_t;
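
/*
 * Illustrative sketch (not part of this API): a camera module's open() method
 * filling in this structure for a single camera. sample_camera_t, sample_ops
 * (a statically initialized camera3_device_ops_t table), and sample_close()
 * are assumptions of this sketch (stdlib.h assumed for calloc).
 *
 *   static int sample_open(const struct hw_module_t *module, const char *id,
 *                          struct hw_device_t **device) {
 *       (void)id;  // single-camera sketch
 *       sample_camera_t *cam = calloc(1, sizeof(*cam));
 *       if (cam == NULL) {
 *           return -ENOMEM;
 *       }
 *       cam->base.common.tag = HARDWARE_DEVICE_TAG;
 *       cam->base.common.version = CAMERA_DEVICE_API_VERSION_3_5;
 *       cam->base.common.module = (struct hw_module_t *)module;
 *       cam->base.common.close = sample_close;
 *       cam->base.ops = &sample_ops;  // camera3_device_ops_t table
 *       cam->base.priv = cam;         // lets each ops entry recover 'cam'
 *       *device = &cam->base.common;
 *       return 0;
 *   }
 */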
3249 
3250 __END_DECLS
3251 
#endif /* ANDROID_INCLUDE_CAMERA3_H */
3253