1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include "utils/Timers.h"
44 #include "sys/ioctl.h"
45 #include <time.h>
46 #include <sync/sync.h>
47 #include "gralloc_priv.h"
48 #include <map>
49 #include <unordered_map>
50 
51 // Display dependencies
52 #include "qdMetaData.h"
53 
54 // Camera dependencies
55 #include "android/QCamera3External.h"
56 #include "util/QCameraFlash.h"
57 #include "QCamera3HWI.h"
58 #include "QCamera3VendorTags.h"
59 #include "QCameraTrace.h"
60 
61 // XML parsing
62 #include "tinyxml2.h"
63 
64 #include "HdrPlusClientUtils.h"
65 
66 extern "C" {
67 #include "mm_camera_dbg.h"
68 }
69 #include "cam_cond.h"
70 
71 using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
72 using namespace android;
73 
74 namespace qcamera {
75 
// Shorthand for fetching the buffer pointer at INDEX from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY     0

// Maximum pixel values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS is considered.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream-count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Values per region tuple (per the Android metering-region convention,
// presumably xmin, ymin, xmax, ymax, weight — confirm at call sites).
#define REGIONS_TUPLE_COUNT    5
// Thresholds (in seconds) for detection of missing request buffers.
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
// Timeout (in seconds) for a flush to complete.
#define FLUSH_TIMEOUT 3
// Number of entries in a statically sized map table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features used by HAL3.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Negative timeout sentinel; presumably means wait forever — confirm at call sites.
#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
143 
144 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
145 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
146 extern pthread_mutex_t gCamLock;
147 volatile uint32_t gCamHal3LogLevel = 1;
148 extern uint8_t gNumCameraSessions;
149 
150 // Note that this doesn't support concurrent front and back camera b/35960155.
151 // The following Easel related variables must be protected by gHdrPlusClientLock.
152 std::unique_ptr<EaselManagerClient> gEaselManagerClient;
153 bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
154 int32_t gActiveEaselClient = 0; // The number of active cameras on Easel.
155 std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
156 bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
157 std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
158 bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
159 bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
160 
161 // If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
162 bool gEaselBypassOnly;
163 
164 std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
165 
166 
// Maps CDS property strings ("On"/"Off"/"Auto") to HAL CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video HDR enum <-> HAL video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor binning-correction enum <-> HAL binning-correction mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor IR enum <-> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

// Android control effect enum <-> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
207 
// Android AWB enum <-> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene-mode enum <-> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// Android AF enum <-> HAL focus mode. Note AF_MODE_OFF appears twice so
// that both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED resolve to it.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android chromatic-aberration-correction enum <-> HAL aberration mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding enum <-> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode <-> HAL flash mode implied by that AE mode (e.g. plain
// AE_MODE_ON keeps the flash off).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

// Android flash enum <-> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect enum <-> HAL face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance-calibration enum <-> HAL focus calibration.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens-state enum <-> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
320 
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry is the Android convention for "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
329 
// Android sensor test-pattern enum <-> HAL test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 * (I.e. do NOT reorder these entries without checking the HAL-to-Android
 * lookup path.)
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate <-> HAL HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC enum <-> HAL AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// Vendor exposure-metering enum <-> HAL auto-exposure mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor ISO enum <-> HAL ISO mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};
411 
// camera3_device_ops function table handed to the Android camera framework.
// Unused entry points (register_stream_buffers, get_metadata_vendor_tag_ops)
// are left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
423 
// A 4-tuple of int32_t values used as an unordered-container key
// (presumably describing one stream configuration — confirm with callers).
typedef std::tuple<int32_t, int32_t, int32_t, int32_t> config_entry;

// Component-wise equality for config_entry: entries match only when all
// four fields are identical.
bool operator == (const config_entry & lhs, const config_entry & rhs) {
    if (std::get<0>(lhs) != std::get<0>(rhs)) return false;
    if (std::get<1>(lhs) != std::get<1>(rhs)) return false;
    if (std::get<2>(lhs) != std::get<2>(rhs)) return false;
    return std::get<3>(lhs) == std::get<3>(rhs);
}

// Hash functor for config_entry, folding the four components with the
// conventional 31-based polynomial combiner (seed 1).
struct ConfigEntryHash {
    std::size_t operator() (config_entry const& entry) const {
        const std::size_t kPrime = 31;
        std::size_t combined = 1;
        combined = combined * kPrime + std::hash<int> {} (std::get<0>(entry));
        combined = combined * kPrime + std::hash<int> {} (std::get<1>(entry));
        combined = combined * kPrime + std::hash<int> {} (std::get<2>(entry));
        combined = combined * kPrime + std::hash<int> {} (std::get<3>(entry));
        return combined;
    }
};
444 
// Per-camera session ids, initialised to a recognizable sentinel value
// (0xDEADBEEF) before any session is created.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
447 
logEaselEvent(const char * tag,const char * event)448 static inline void logEaselEvent(const char *tag, const char *event) {
449     if (CC_UNLIKELY(gEaselProfilingEnabled)) {
450         struct timespec ts = {};
451         static int64_t kMsPerSec = 1000;
452         static int64_t kNsPerMs = 1000000;
453         status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
454         if (res != OK) {
455             ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
456         } else {
457             int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
458             ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
459         }
460     }
461 }
462 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table, stored in mCallbacks
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_ISTypeVideo(IS_TYPE_NONE),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
      mLastRequestedOisDataMode(ANDROID_STATISTICS_OIS_DATA_MODE_OFF),
      mLastRequestedZoomRatio(1.0f),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1),
      mLastFocusDistance(0.0)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t / camera3_device_t plumbing the framework uses.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; clear the cache slots.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    // TNR enable knobs for preview/video, read from system properties.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    // Falls back to CAM_PAD_TO_64 stride padding when the query is disabled
    // or the library/symbol is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    // PD (phase detection) stats: supported iff a valid stat index exists.
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}
649 
650 /*===========================================================================
651  * FUNCTION   : ~QCamera3HardwareInterface
652  *
653  * DESCRIPTION: destructor of QCamera3HardwareInterface
654  *
655  * PARAMETERS : none
656  *
657  * RETURN     : none
658  *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Clean up Easel error future first to avoid Easel error happens during destructor.
    cleanupEaselErrorFuture();

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        closeHdrPlusClientLocked();
    }

    // unlink of dualcam during close camera: send a BUNDLE_INFO command with
    // sync turned off so the backend stops treating the two sensors as linked.
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Unlink failure is not fatal; continue tearing down this session.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    // All channels are stopped now; it is safe to free them and the
    // stream_info_t records that reference them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // Picture/depth channels are owned through mStreamInfo above; only the
    // raw pointers are cleared here.
    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any bookkeeping for requests/buffers that never completed.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    // Free the cached per-template default request settings.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
829 
830 /*===========================================================================
831  * FUNCTION   : erasePendingRequest
832  *
833  * DESCRIPTION: function to erase a desired pending request after freeing any
834  *              allocated memory
835  *
836  * PARAMETERS :
837  *   @i       : iterator pointing to pending request to be erased
838  *
839  * RETURN     : iterator pointing to the next request
840  *==========================================================================*/
841 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)842         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
843 {
844     if (i->input_buffer != NULL) {
845         free(i->input_buffer);
846         i->input_buffer = NULL;
847     }
848     if (i->settings != NULL)
849         free_camera_metadata((camera_metadata_t*)i->settings);
850 
851     mExpectedInflightDuration -= i->expectedFrameDuration;
852     if (mExpectedInflightDuration < 0) {
853         LOGE("Negative expected in-flight duration!");
854         mExpectedInflightDuration = 0;
855     }
856 
857     return mPendingRequestsList.erase(i);
858 }
859 
860 /*===========================================================================
861  * FUNCTION   : camEvtHandle
862  *
863  * DESCRIPTION: Function registered to mm-camera-interface to handle events
864  *
865  * PARAMETERS :
866  *   @camera_handle : interface layer camera handle
867  *   @evt           : ptr to event
868  *   @user_data     : user data ptr
869  *
870  * RETURN     : none
871  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)872 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
873                                           mm_camera_event_t *evt,
874                                           void *user_data)
875 {
876     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
877     if (obj && evt) {
878         switch(evt->server_event_type) {
879             case CAM_EVENT_TYPE_DAEMON_DIED:
880                 pthread_mutex_lock(&obj->mMutex);
881                 obj->mState = ERROR;
882                 pthread_mutex_unlock(&obj->mMutex);
883                 LOGE("Fatal, camera daemon died");
884                 break;
885 
886             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
887                 LOGD("HAL got request pull from Daemon");
888                 pthread_mutex_lock(&obj->mMutex);
889                 obj->mWokenUpByDaemon = true;
890                 obj->unblockRequestIfNecessary();
891                 pthread_mutex_unlock(&obj->mMutex);
892                 break;
893 
894             default:
895                 LOGW("Warning: Unhandled event %d",
896                         evt->server_event_type);
897                 break;
898         }
899     } else {
900         LOGE("NULL user_data/evt");
901     }
902 }
903 
904 /*===========================================================================
905  * FUNCTION   : openCamera
906  *
907  * DESCRIPTION: open camera
908  *
909  * PARAMETERS :
910  *   @hw_device  : double ptr for camera device struct
911  *
912  * RETURN     : int32_t type of status
913  *              NO_ERROR  -- success
914  *              none-zero failure code
915  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)916 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
917 {
918     int rc = 0;
919     if (mState != CLOSED) {
920         *hw_device = NULL;
921         return PERMISSION_DENIED;
922     }
923 
924     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
925     mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
926     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
927              mCameraId);
928 
929     if (mCameraHandle) {
930         LOGE("Failure: Camera already opened");
931         return ALREADY_EXISTS;
932     }
933 
934     {
935         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
936         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
937             logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
938             if (gActiveEaselClient == 0) {
939                 rc = gEaselManagerClient->resume(this);
940                 if (rc != 0) {
941                     ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
942                     return rc;
943                 }
944                 mEaselFwUpdated = false;
945             }
946             gActiveEaselClient++;
947 
948             mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
949             rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
950             if (rc != OK) {
951                 ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
952                         strerror(-rc), rc);
953                 return rc;
954             }
955         }
956     }
957 
958     rc = openCamera();
959     if (rc == 0) {
960         *hw_device = &mCameraDevice.common;
961     } else {
962         *hw_device = NULL;
963 
964         // Suspend Easel because opening camera failed.
965         {
966             std::unique_lock<std::mutex> l(gHdrPlusClientLock);
967             if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
968                 if (gActiveEaselClient == 1) {
969                     status_t suspendErr = gEaselManagerClient->suspend();
970                     if (suspendErr != 0) {
971                         ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
972                                 strerror(-suspendErr), suspendErr);
973                     }
974                 }
975                 gActiveEaselClient--;
976             }
977 
978             if (mQCamera3HdrPlusListenerThread != nullptr) {
979                 mQCamera3HdrPlusListenerThread->requestExit();
980                 mQCamera3HdrPlusListenerThread->join();
981                 mQCamera3HdrPlusListenerThread = nullptr;
982             }
983         }
984     }
985 
986     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
987              mCameraId, rc);
988 
989     if (rc == NO_ERROR) {
990         mState = OPENED;
991     }
992 
993     return rc;
994 }
995 
996 /*===========================================================================
997  * FUNCTION   : openCamera
998  *
999  * DESCRIPTION: open camera
1000  *
1001  * PARAMETERS : none
1002  *
1003  * RETURN     : int32_t type of status
1004  *              NO_ERROR  -- success
1005  *              none-zero failure code
1006  *==========================================================================*/
openCamera()1007 int QCamera3HardwareInterface::openCamera()
1008 {
1009     int rc = 0;
1010     char value[PROPERTY_VALUE_MAX];
1011 
1012     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
1013 
1014     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
1015     if (rc < 0) {
1016         LOGE("Failed to reserve flash for camera id: %d",
1017                 mCameraId);
1018         return UNKNOWN_ERROR;
1019     }
1020 
1021     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
1022     if (rc) {
1023         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
1024         return rc;
1025     }
1026 
1027     if (!mCameraHandle) {
1028         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
1029         return -ENODEV;
1030     }
1031 
1032     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
1033             camEvtHandle, (void *)this);
1034 
1035     if (rc < 0) {
1036         LOGE("Error, failed to register event callback");
1037         /* Not closing camera here since it is already handled in destructor */
1038         return FAILED_TRANSACTION;
1039     }
1040 
1041     mExifParams.debug_params =
1042             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
1043     if (mExifParams.debug_params) {
1044         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
1045     } else {
1046         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
1047         return NO_MEMORY;
1048     }
1049     mFirstConfiguration = true;
1050 
1051     //Notify display HAL that a camera session is active.
1052     //But avoid calling the same during bootup because camera service might open/close
1053     //cameras at boot time during its initialization and display service will also internally
1054     //wait for camera service to initialize first while calling this display API, resulting in a
1055     //deadlock situation. Since boot time camera open/close calls are made only to fetch
1056     //capabilities, no need of this display bw optimization.
1057     //Use "service.bootanim.exit" property to know boot status.
1058     property_get("service.bootanim.exit", value, "0");
1059     if (atoi(value) == 1) {
1060         pthread_mutex_lock(&gCamLock);
1061         if (gNumCameraSessions++ == 0) {
1062             setCameraLaunchStatus(true);
1063         }
1064         pthread_mutex_unlock(&gCamLock);
1065     }
1066 
1067     //fill the session id needed while linking dual cam
1068     pthread_mutex_lock(&gCamLock);
1069     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1070         &sessionId[mCameraId]);
1071     pthread_mutex_unlock(&gCamLock);
1072 
1073     if (rc < 0) {
1074         LOGE("Error, failed to get sessiion id");
1075         return UNKNOWN_ERROR;
1076     } else {
1077         //Allocate related cam sync buffer
1078         //this is needed for the payload that goes along with bundling cmd for related
1079         //camera use cases
1080         m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1081         rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
1082         if(rc != OK) {
1083             rc = NO_MEMORY;
1084             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1085             return NO_MEMORY;
1086         }
1087 
1088         //Map memory for related cam sync buffer
1089         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1090                 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1091                 m_pDualCamCmdHeap->getFd(0),
1092                 sizeof(cam_dual_camera_cmd_info_t),
1093                 m_pDualCamCmdHeap->getPtr(0));
1094         if(rc < 0) {
1095             LOGE("Dualcam: failed to map Related cam sync buffer");
1096             rc = FAILED_TRANSACTION;
1097             return NO_MEMORY;
1098         }
1099         m_pDualCamCmdPtr =
1100                 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
1101     }
1102 
1103     LOGH("mCameraId=%d",mCameraId);
1104 
1105     return NO_ERROR;
1106 }
1107 
1108 /*===========================================================================
1109  * FUNCTION   : closeCamera
1110  *
1111  * DESCRIPTION: close camera
1112  *
1113  * PARAMETERS : none
1114  *
1115  * RETURN     : int32_t type of status
1116  *              NO_ERROR  -- success
1117  *              none-zero failure code
1118  *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check here —
    // presumably closeCamera() is only reached after a successful open; confirm.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            // Suspend Easel only when this was the last active client.
            if (gActiveEaselClient == 1) {
                rc = gEaselManagerClient->suspend();
                if (rc != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                }
            }
            gActiveEaselClient--;
        }

        // Stop the HDR+ listener thread started in openCamera(), if any.
        if (mQCamera3HdrPlusListenerThread != nullptr) {
            mQCamera3HdrPlusListenerThread->requestExit();
            mQCamera3HdrPlusListenerThread->join();
            mQCamera3HdrPlusListenerThread = nullptr;
        }
    }

    return rc;
}
1191 
1192 /*===========================================================================
1193  * FUNCTION   : initialize
1194  *
1195  * DESCRIPTION: Initialize frameworks callback functions
1196  *
1197  * PARAMETERS :
1198  *   @callback_ops : callback function to frameworks
1199  *
1200  * RETURN     :
1201  *
1202  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)1203 int QCamera3HardwareInterface::initialize(
1204         const struct camera3_callback_ops *callback_ops)
1205 {
1206     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
1207     int rc;
1208 
1209     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1210     pthread_mutex_lock(&mMutex);
1211 
1212     // Validate current state
1213     switch (mState) {
1214         case OPENED:
1215             /* valid state */
1216             break;
1217         default:
1218             LOGE("Invalid state %d", mState);
1219             rc = -ENODEV;
1220             goto err1;
1221     }
1222 
1223     rc = initParameters();
1224     if (rc < 0) {
1225         LOGE("initParamters failed %d", rc);
1226         goto err1;
1227     }
1228     mCallbackOps = callback_ops;
1229 
1230     mChannelHandle = mCameraHandle->ops->add_channel(
1231             mCameraHandle->camera_handle, NULL, NULL, this);
1232     if (mChannelHandle == 0) {
1233         LOGE("add_channel failed");
1234         rc = -ENOMEM;
1235         pthread_mutex_unlock(&mMutex);
1236         return rc;
1237     }
1238 
1239     pthread_mutex_unlock(&mMutex);
1240     mCameraInitialized = true;
1241     mState = INITIALIZED;
1242     LOGI("X");
1243     return 0;
1244 
1245 err1:
1246     pthread_mutex_unlock(&mMutex);
1247     return rc;
1248 }
1249 
1250 /*===========================================================================
1251  * FUNCTION   : validateStreamDimensions
1252  *
1253  * DESCRIPTION: Check if the configuration requested are those advertised
1254  *
1255  * PARAMETERS :
1256  *   @cameraId : cameraId
1257  *   @stream_list : streams to be configured
1258  *
1259  * RETURN     :
1260  *
1261  *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(uint32_t cameraId,
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    // PDAF depth dimensions come from the raw meta table when a PD stat
    // index is advertised by the capability.
    auto pDIndex = getPDStatIndex(gCamCapability[cameraId]);
    bool pDSupported = (0 <= pDIndex) ? true : false;
    if (pDSupported) {
        depthWidth = gCamCapability[cameraId]->raw_meta_dim[pDIndex].width;
        depthHeight = gCamCapability[cameraId]->raw_meta_dim[pDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 rotation, swap width/height before comparing against the
        // (unrotated) capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW16 + DEPTH dataspace is the PD stat stream: it must match
            // the raw meta dimension exactly.
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    pDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[cameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[cameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    pDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_Y8:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/input/bidirectional streams at full active array size are
            // accepted directly.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[cameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[cameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[cameraId]->active_array_size.width,
                    gCamCapability[cameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1401 
1402 /*===========================================================================
1403  * FUNCTION   : validateUsageFlags
1404  *
1405  * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1406  *
1407  * PARAMETERS :
1408  *   @cameraId : camera id.
1409  *   @stream_list : streams to be configured
1410  *
1411  * RETURN     :
1412  *   NO_ERROR if the usage flags are supported
1413  *   error code if usage flags are not supported
1414  *
1415  *==========================================================================*/
validateUsageFlags(uint32_t cameraId,const camera3_stream_configuration_t * streamList)1416 int QCamera3HardwareInterface::validateUsageFlags(uint32_t cameraId,
1417         const camera3_stream_configuration_t* streamList)
1418 {
1419     for (size_t j = 0; j < streamList->num_streams; j++) {
1420         const camera3_stream_t *newStream = streamList->streams[j];
1421 
1422         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1423             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1424              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1425             continue;
1426         }
1427 
1428         // Here we only care whether it's EIS3 or not
1429         char is_type_value[PROPERTY_VALUE_MAX];
1430         property_get("persist.camera.is_type", is_type_value, "4");
1431         cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1432         if (gCamCapability[cameraId]->position == CAM_POSITION_FRONT ||
1433                 streamList->operation_mode ==
1434                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1435             isType = IS_TYPE_NONE;
1436 
1437         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1438         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1439         bool isZSL = IS_USAGE_ZSL(newStream->usage);
1440         bool forcePreviewUBWC = true;
1441         if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1442             forcePreviewUBWC = false;
1443         }
1444         cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1445                 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
1446         cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1447                 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
1448         cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1449                 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
1450 
1451         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1452         // So color spaces will always match.
1453 
1454         // Check whether underlying formats of shared streams match.
1455         if (isVideo && isPreview && videoFormat != previewFormat) {
1456             LOGE("Combined video and preview usage flag is not supported");
1457             return -EINVAL;
1458         }
1459         if (isPreview && isZSL && previewFormat != zslFormat) {
1460             LOGE("Combined preview and zsl usage flag is not supported");
1461             return -EINVAL;
1462         }
1463         if (isVideo && isZSL && videoFormat != zslFormat) {
1464             LOGE("Combined video and zsl usage flag is not supported");
1465             return -EINVAL;
1466         }
1467     }
1468     return NO_ERROR;
1469 }
1470 
1471 /*===========================================================================
1472  * FUNCTION   : validateUsageFlagsForEis
1473  *
1474  * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1475  *
1476  * PARAMETERS :
1477  *   @bEisEnable : Flag indicated that EIS is enabled.
1478  *   @bEisSupportedSize : Flag indicating that there is a preview/video stream
1479  *                        within the EIS supported size.
1480  *   @stream_list : streams to be configured
1481  *
1482  * RETURN     :
1483  *   NO_ERROR if the usage flags are supported
1484  *   error code if usage flags are not supported
1485  *
1486  *==========================================================================*/
validateUsageFlagsForEis(bool bEisEnable,bool bEisSupportedSize,const camera3_stream_configuration_t * streamList)1487 int QCamera3HardwareInterface::validateUsageFlagsForEis(bool bEisEnable, bool bEisSupportedSize,
1488         const camera3_stream_configuration_t* streamList)
1489 {
1490     for (size_t j = 0; j < streamList->num_streams; j++) {
1491         const camera3_stream_t *newStream = streamList->streams[j];
1492 
1493         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1494         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1495 
1496         // Because EIS is "hard-coded" for certain use case, and current
1497        // implementation doesn't support shared preview and video on the same
1498         // stream, return failure if EIS is forced on.
1499         if (isPreview && isVideo && bEisEnable && bEisSupportedSize) {
1500             LOGE("Combined video and preview usage flag is not supported due to EIS");
1501             return -EINVAL;
1502         }
1503     }
1504     return NO_ERROR;
1505 }
1506 
1507 /*==============================================================================
1508  * FUNCTION   : isSupportChannelNeeded
1509  *
1510  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1511  *
1512  * PARAMETERS :
1513  *   @stream_list : streams to be configured
1514  *   @stream_config_info : the config info for streams to be configured
1515  *
 * RETURN     : Boolean true/false decision
1517  *
1518  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1519 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1520         camera3_stream_configuration_t *streamList,
1521         cam_stream_size_info_t stream_config_info)
1522 {
1523     uint32_t i;
1524     bool pprocRequested = false;
1525     /* Check for conditions where PProc pipeline does not have any streams*/
1526     for (i = 0; i < stream_config_info.num_streams; i++) {
1527         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1528                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1529             pprocRequested = true;
1530             break;
1531         }
1532     }
1533 
1534     if (pprocRequested == false )
1535         return true;
1536 
1537     /* Dummy stream needed if only raw or jpeg streams present */
1538     for (i = 0; i < streamList->num_streams; i++) {
1539         switch(streamList->streams[i]->format) {
1540             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1541             case HAL_PIXEL_FORMAT_RAW10:
1542             case HAL_PIXEL_FORMAT_RAW16:
1543             case HAL_PIXEL_FORMAT_BLOB:
1544                 break;
1545             default:
1546                 return false;
1547         }
1548     }
1549     return true;
1550 }
1551 
1552 /*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensorModeInfo : sensor mode information (output)
1559  *
1560  * RETURN     : int32_t type of status
1561  *              NO_ERROR  -- success
1562  *              none-zero failure code
1563  *
1564  *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // The sensor mode selected by the backend depends on the largest
    // configured stream; compute the per-axis maximum over all streams.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Push the max dimension down first so the backend can pick the matching
    // sensor mode before we query it. NOTE: mParameters is reused as scratch
    // space here; callers must not rely on its prior contents.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Now query the sensor mode info the backend selected for that dimension.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the result out of the parameter batch into the caller's struct.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}
1611 
1612 /*==============================================================================
1613  * FUNCTION   : getCurrentSensorModeInfo
1614  *
1615  * DESCRIPTION: Get sensor mode information that is currently selected.
1616  *
1617  * PARAMETERS :
1618  *   @sensorModeInfo : sensor mode information (output)
1619  *
1620  * RETURN     : int32_t type of status
1621  *              NO_ERROR  -- success
1622  *              none-zero failure code
1623  *
1624  *==========================================================================*/
getCurrentSensorModeInfo(cam_sensor_mode_info_t & sensorModeInfo)1625 int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1626 {
1627     int32_t rc = NO_ERROR;
1628 
1629     metadata_buffer_t *cachedParameters = (metadata_buffer_t *) malloc(sizeof(metadata_buffer_t));
1630     if (nullptr == cachedParameters) {
1631         return NO_MEMORY;
1632     }
1633 
1634     memcpy(cachedParameters, mParameters, sizeof(metadata_buffer_t));
1635 
1636     clear_metadata_buffer(mParameters);
1637     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1638 
1639     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1640             mParameters);
1641     if (rc != NO_ERROR) {
1642         LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1643         free(cachedParameters);
1644         return rc;
1645     }
1646 
1647     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1648     LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1649             "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1650             sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1651             sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1652             sensorModeInfo.num_raw_bits);
1653 
1654     memcpy(mParameters, cachedParameters, sizeof(metadata_buffer_t));
1655     free(cachedParameters);
1656 
1657     return rc;
1658 }
1659 
1660 /*==============================================================================
1661  * FUNCTION   : addToPPFeatureMask
1662  *
1663  * DESCRIPTION: add additional features to pp feature mask based on
1664  *              stream type and usecase
1665  *
1666  * PARAMETERS :
1667  *   @stream_format : stream type for feature mask
1668  *   @stream_idx : stream idx within postprocess_mask list to change
1669  *
 * RETURN     : None
1671  *
1672  *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // On LE targets SW TNR is the default feature mask when the property
    // is unset.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept either a hex value ("0x...") or a plain decimal value.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Malformed property value: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD when both bits are set in the
        // property mask; only one of the two is added.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the capability advertises it,
        // independent of the property mask.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        // Binning correction is video-only and capability-gated.
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1732 
1733 /*==============================================================================
1734  * FUNCTION   : updateFpsInPreviewBuffer
1735  *
1736  * DESCRIPTION: update FPS information in preview buffer.
1737  *
1738  * PARAMETERS :
1739  *   @metadata    : pointer to metadata buffer
1740  *   @frame_number: frame_number to look for in pending buffer list
1741  *
1742  * RETURN     : None
1743  *
1744  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1745 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1746         uint32_t frame_number)
1747 {
1748     // Mark all pending buffers for this particular request
1749     // with corresponding framerate information
1750     for (List<PendingBuffersInRequest>::iterator req =
1751             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1752             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1753         for(List<PendingBufferInfo>::iterator j =
1754                 req->mPendingBufferList.begin();
1755                 j != req->mPendingBufferList.end(); j++) {
1756             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1757             if ((req->frame_number == frame_number) &&
1758                 (channel->getStreamTypeMask() &
1759                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1760                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1761                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1762                     typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1763                     struct private_handle_t *priv_handle =
1764                         (struct private_handle_t *)(*(j->buffer));
1765                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1766                 }
1767             }
1768         }
1769     }
1770 }
1771 
1772 /*==============================================================================
1773  * FUNCTION   : updateTimeStampInPendingBuffers
1774  *
1775  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1776  *              of a frame number
1777  *
1778  * PARAMETERS :
1779  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1780  *   @timestamp   : timestamp to be set
1781  *
1782  * RETURN     : None
1783  *
1784  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1785 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1786         uint32_t frameNumber, nsecs_t timestamp)
1787 {
1788     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1789             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1790         // WAR: save the av_timestamp to the next frame
1791         if(req->frame_number == frameNumber + 1) {
1792             req->av_timestamp = timestamp;
1793         }
1794 
1795         if (req->frame_number != frameNumber)
1796             continue;
1797 
1798         for (auto k = req->mPendingBufferList.begin();
1799                 k != req->mPendingBufferList.end(); k++ ) {
1800             // WAR: update timestamp when it's not VT usecase
1801             QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1802             if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1803                 m_bAVTimerEnabled)) {
1804                     struct private_handle_t *priv_handle =
1805                         (struct private_handle_t *) (*(k->buffer));
1806                     setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1807             }
1808         }
1809     }
1810     return;
1811 }
1812 
1813 /*===========================================================================
1814  * FUNCTION   : configureStreams
1815  *
1816  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1817  *              and output streams.
1818  *
1819  * PARAMETERS :
1820  *   @stream_list : streams to be configured
1821  *
1822  * RETURN     :
1823  *
1824  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1825 int QCamera3HardwareInterface::configureStreams(
1826         camera3_stream_configuration_t *streamList)
1827 {
1828     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1829     int rc = 0;
1830 
1831     // Acquire perfLock before configure streams
1832     mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1833     rc = configureStreamsPerfLocked(streamList);
1834     mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1835 
1836     return rc;
1837 }
1838 
1839 /*===========================================================================
1840  * FUNCTION   : validateStreamCombination
1841  *
1842  * DESCRIPTION: Validate a given stream combination.
1843  *
1844  * PARAMETERS :
1845  *   @cameraId : camera Id.
1846  *   @stream_list : stream combination to be validated.
1847  *   @status : validation status.
1848  *
1849  * RETURN     : int32_t type of status
1850  *              NO_ERROR  -- success
1851  *              none-zero failure code
1852  *==========================================================================*/
int32_t QCamera3HardwareInterface::validateStreamCombination(uint32_t cameraId,
        camera3_stream_configuration_t *streamList /*in*/, StreamValidateStatus *status /*out*/)
{
    bool isJpeg = false;
    bool bJpegExceeds4K = false;
    bool bJpegOnEncoder = false;
    uint32_t width_ratio;
    uint32_t height_ratio;
    size_t rawStreamCnt = 0;
    size_t stallStreamCnt = 0;
    size_t processedStreamCnt = 0;
    size_t pdStatCount = 0;
    size_t numYuv888OnEncoder = 0;
    cam_dimension_t jpegSize = {0, 0};
    camera3_stream_t *zslStream = nullptr;
    uint32_t maxEisWidth = 0;
    uint32_t maxEisHeight = 0;

    if (status == nullptr) {
        LOGE("NULL stream status");
        return BAD_VALUE;
    }

    // Sanity check stream_list
    if (streamList == NULL) {
        LOGE("NULL stream configuration");
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        LOGE("NULL stream list");
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        LOGE("Bad number of streams requested: %d",
                streamList->num_streams);
        return BAD_VALUE;
    }

    if (streamList->num_streams >= MAX_NUM_STREAMS) {
        LOGE("Maximum number of streams %d exceeded: %d",
                MAX_NUM_STREAMS, streamList->num_streams);
        return BAD_VALUE;
    }

    // Per-stream usage-flag validation, then dimension/rotation checks.
    auto rc = validateUsageFlags(cameraId, streamList);
    if (rc != NO_ERROR) {
        return rc;
    }

    rc = validateStreamDimensions(cameraId, streamList);
    if (rc == NO_ERROR) {
        rc = validateStreamRotations(streamList);
    }
    if (rc != NO_ERROR) {
        LOGE("Invalid stream configuration requested!");
        return rc;
    }

    // EIS is considered supported if the capability advertises EIS 2.0 or
    // EIS 3.0 among its IS types.
    size_t count = IS_TYPE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
            (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
            status->bEisSupported = true;
            break;
        }
    }

    if (status->bEisSupported) {
        maxEisWidth = MAX_EIS_WIDTH;
        maxEisHeight = MAX_EIS_HEIGHT;
    }

    status->maxViewfinderSize = gCamCapability[cameraId]->max_viewfinder_size;
    status->largeYuv888Size = {0, 0};
    /* stream configurations */
    // Single pass over all streams: classify each one and accumulate the
    // counters/flags used by the combination checks below.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        LOGI("stream[%d] type = %d, format = %d, width = %d, "
                "height = %d, rotation = %d, usage = 0x%x",
                 i, newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation,
                newStream->usage);
        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
            status->isZsl = true;
            status->inputStream = newStream;
        }

        // At most one ZSL (input/reprocess) stream is allowed.
        if (IS_USAGE_ZSL(newStream->usage)) {
            if (zslStream != nullptr) {
                LOGE("Multiple input/reprocess streams requested!");
                return BAD_VALUE;
            }
            zslStream = newStream;
        }

        // BLOB with non-depth dataspace is a JPEG stream; remember its size
        // for the 4K and YUV-override checks below.
        if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
                (newStream->data_space != HAL_DATASPACE_DEPTH)) {
            isJpeg = true;
            jpegSize.width = newStream->width;
            jpegSize.height = newStream->height;
            if (newStream->width > VIDEO_4K_WIDTH ||
                    newStream->height > VIDEO_4K_HEIGHT)
                bJpegExceeds4K = true;
        }

        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
                (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
            if (IS_USAGE_VIDEO(newStream->usage)) {
                status->bIsVideo = true;
                // In HAL3 we can have multiple different video streams.
                // The variables video width and height are used below as
                // dimensions of the biggest of them
                if (status->videoWidth < newStream->width ||
                        status->videoHeight < newStream->height) {
                    status->videoWidth = newStream->width;
                    status->videoHeight = newStream->height;
                }
                if ((VIDEO_4K_WIDTH <= newStream->width) &&
                        (VIDEO_4K_HEIGHT <= newStream->height)) {
                    status->bIs4KVideo = true;
                }
            }
            // EIS-supported size must hold for ALL preview/video streams
            // (accumulated with &=).
            status->bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
                                  (newStream->height <= maxEisHeight);
        }
        if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
                if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                    status->depthPresent = true;
                    break;
                }
                stallStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    status->numStreamsOnEncoder++;
                    bJpegOnEncoder = true;
                }
                // Detect JPEG sizes that would need more downscaling from the
                // active array than the ISP supports ("small jpeg").
                width_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.width,
                        newStream->width);
                height_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.height,
                        newStream->height);;
                FATAL_IF(gCamCapability[cameraId]->max_downscale_factor == 0,
                        "FATAL: max_downscale_factor cannot be zero and so assert");
                if ( (width_ratio > gCamCapability[cameraId]->max_downscale_factor) ||
                    (height_ratio > gCamCapability[cameraId]->max_downscale_factor)) {
                    LOGH("Setting small jpeg size flag to true");
                    status->bSmallJpegSize = true;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
                rawStreamCnt++;
                // RAW16 with depth dataspace is a PD-stat stream.
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
                    pdStatCount++;
                }
                break;
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                processedStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
                            !IS_USAGE_ZSL(newStream->usage)) {
                        status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    status->numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_Y8:
                processedStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // If Yuv888/Y8 size is not greater than 4K, set feature mask
                    // to SUPERSET so that it support concurrent request on
                    // YUV and JPEG.
                    if (newStream->width <= VIDEO_4K_WIDTH &&
                            newStream->height <= VIDEO_4K_HEIGHT) {
                        status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    if (newStream->format == HAL_PIXEL_FORMAT_Y8) {
                        status->bY80OnEncoder = true;
                    }
                    status->numStreamsOnEncoder++;
                    numYuv888OnEncoder++;
                    status->largeYuv888Size.width = newStream->width;
                    status->largeYuv888Size.height = newStream->height;
                }
                break;
            default:
                LOGE("not a supported format 0x%x", newStream->format);
                return BAD_VALUE;
            }
        }
    }

    if (validateUsageFlagsForEis(status->bEisSupported, status->bEisSupportedSize, streamList) !=
            NO_ERROR) {
        return BAD_VALUE;
    }

    /* Check if num_streams is sane */
    if (stallStreamCnt > MAX_STALLING_STREAMS ||
            rawStreamCnt > MAX_RAW_STREAMS ||
            processedStreamCnt > MAX_PROCESSED_STREAMS) {
        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
        return BAD_VALUE;
    }
    /* Check whether we have zsl stream or 4k video case */
    if (status->isZsl && status->bIs4KVideo) {
        LOGE("Currently invalid configuration ZSL & 4K Video!");
        return BAD_VALUE;
    }
    /* Check if stream sizes are sane */
    if (status->numStreamsOnEncoder > 2) {
        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
        return BAD_VALUE;
    } else if (1 < status->numStreamsOnEncoder){
        status->bUseCommonFeatureMask = true;
        LOGH("Multiple streams above max viewfinder size, common mask needed");
    }

    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (status->bIs4KVideo && bJpegExceeds4K) {
        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
        return BAD_VALUE;
    }

    if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            status->depthPresent) {
        LOGE("HAL doesn't support depth streams in HFR mode!");
        return BAD_VALUE;
    }

    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
    // configurations:
    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
    if (!status->isZsl && bJpegOnEncoder && bJpegExceeds4K && status->bUseCommonFeatureMask) {
        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
                __func__);
        return BAD_VALUE;
    }

    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is greater or equal to the JPEG size, set common
    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
    // NOTE(review): the code below uses strict '>' despite the "greater or
    // equal" wording above — confirm which is intended.
    if (numYuv888OnEncoder && isOnEncoder(status->maxViewfinderSize,
            jpegSize.width, jpegSize.height) &&
            status->largeYuv888Size.width > jpegSize.width &&
            status->largeYuv888Size.height > jpegSize.height) {
        status->bYuv888OverrideJpeg = true;
    } else if (!isJpeg && status->numStreamsOnEncoder > 1) {
        status->commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    }

    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
            status->maxViewfinderSize.width, status->maxViewfinderSize.height, status->isZsl,
            status->bUseCommonFeatureMask, status->commonFeatureMask);
    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
            status->numStreamsOnEncoder, processedStreamCnt, stallStreamCnt,
            status->bSmallJpegSize);

    // At most one PD-stat stream, and none in HFR mode.
    if (1 < pdStatCount) {
        LOGE("HAL doesn't support multiple PD streams");
        return BAD_VALUE;
    }

    if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            (1 == pdStatCount)) {
        LOGE("HAL doesn't support PD streams in HFR mode!");
        return -EINVAL;
    }

    return NO_ERROR;
}
2138 
2139 /*===========================================================================
2140  * FUNCTION   : configureStreamsPerfLocked
2141  *
2142  * DESCRIPTION: configureStreams while perfLock is held.
2143  *
2144  * PARAMETERS :
2145  *   @stream_list : streams to be configured
2146  *
2147  * RETURN     : int32_t type of status
2148  *              NO_ERROR  -- success
2149  *              none-zero failure code
2150  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)2151 int QCamera3HardwareInterface::configureStreamsPerfLocked(
2152         camera3_stream_configuration_t *streamList)
2153 {
2154     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
2155 
2156     StreamValidateStatus streamStatus;
2157     auto rc = validateStreamCombination(mCameraId, streamList, &streamStatus);
2158     if (NO_ERROR != rc) {
2159         return rc;
2160     }
2161 
2162     mOpMode = streamList->operation_mode;
2163     LOGD("mOpMode: %d", mOpMode);
2164 
2165     // Disable HDR+ if it's enabled;
2166     {
2167         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
2168         finishHdrPlusClientOpeningLocked(l);
2169         disableHdrPlusModeLocked();
2170     }
2171 
2172     /* first invalidate all the steams in the mStreamList
2173      * if they appear again, they will be validated */
2174     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2175             it != mStreamInfo.end(); it++) {
2176         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
2177         if (channel) {
2178           channel->stop();
2179         }
2180         (*it)->status = INVALID;
2181     }
2182 
2183     if (mRawDumpChannel) {
2184         mRawDumpChannel->stop();
2185         delete mRawDumpChannel;
2186         mRawDumpChannel = NULL;
2187     }
2188 
2189     if (mHdrPlusRawSrcChannel) {
2190         mHdrPlusRawSrcChannel->stop();
2191         delete mHdrPlusRawSrcChannel;
2192         mHdrPlusRawSrcChannel = NULL;
2193     }
2194 
2195     if (mSupportChannel)
2196         mSupportChannel->stop();
2197 
2198     if (mAnalysisChannel) {
2199         mAnalysisChannel->stop();
2200     }
2201     if (mMetadataChannel) {
2202         /* If content of mStreamInfo is not 0, there is metadata stream */
2203         mMetadataChannel->stop();
2204     }
2205     if (mChannelHandle) {
2206         stopChannelLocked(/*stop_immediately*/false);
2207     }
2208 
2209     pthread_mutex_lock(&mMutex);
2210 
2211     mPictureChannel = NULL;
2212 
2213     // Check state
2214     switch (mState) {
2215         case INITIALIZED:
2216         case CONFIGURED:
2217         case STARTED:
2218             /* valid state */
2219             break;
2220         default:
2221             LOGE("Invalid state %d", mState);
2222             pthread_mutex_unlock(&mMutex);
2223             return -ENODEV;
2224     }
2225 
2226     /* Check whether we have video stream */
2227     m_bIs4KVideo = streamStatus.bIs4KVideo;
2228     m_bIsVideo = streamStatus.bIsVideo;
2229     m_bEisSupported = streamStatus.bEisSupported;
2230     m_bEisSupportedSize = streamStatus.bEisSupportedSize;
2231     m_bTnrEnabled = false;
2232     m_bVideoHdrEnabled = false;
2233     cam_dimension_t previewSize = {0, 0};
2234 
2235     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
2236 
2237     /*EIS configuration*/
2238     uint8_t eis_prop_set;
2239 
2240     // Initialize all instant AEC related variables
2241     mInstantAEC = false;
2242     mResetInstantAEC = false;
2243     mInstantAECSettledFrameNumber = 0;
2244     mAecSkipDisplayFrameBound = 0;
2245     mInstantAecFrameIdxCount = 0;
2246     mCurrFeatureState = 0;
2247     mStreamConfig = true;
2248 
2249     m_bAVTimerEnabled = false;
2250 
2251     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
2252 
2253     /* EIS setprop control */
2254     char eis_prop[PROPERTY_VALUE_MAX];
2255     memset(eis_prop, 0, sizeof(eis_prop));
2256     property_get("persist.camera.eis.enable", eis_prop, "1");
2257     eis_prop_set = (uint8_t)atoi(eis_prop);
2258 
2259     m_bEisEnable = eis_prop_set && m_bEisSupported &&
2260             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2261             (gCamCapability[mCameraId]->position == CAM_POSITION_BACK ||
2262              gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX);
2263 
2264     LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
2265             m_bEisEnable, eis_prop_set, m_bEisSupported);
2266 
2267     uint8_t forceEnableTnr = 0;
2268     char tnr_prop[PROPERTY_VALUE_MAX];
2269     memset(tnr_prop, 0, sizeof(tnr_prop));
2270     property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2271     forceEnableTnr = (uint8_t)atoi(tnr_prop);
2272 
2273     /* Logic to enable/disable TNR based on specific config size/etc.*/
2274     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
2275             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2276         m_bTnrEnabled = true;
2277     else if (forceEnableTnr)
2278         m_bTnrEnabled = true;
2279 
2280     char videoHdrProp[PROPERTY_VALUE_MAX];
2281     memset(videoHdrProp, 0, sizeof(videoHdrProp));
2282     property_get("persist.camera.hdr.video", videoHdrProp, "0");
2283     uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2284 
2285     if (hdr_mode_prop == 1 && m_bIsVideo &&
2286             mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2287         m_bVideoHdrEnabled = true;
2288     else
2289         m_bVideoHdrEnabled = false;
2290 
2291     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2292     for (size_t i = 0; i < streamList->num_streams; i++) {
2293         camera3_stream_t *newStream = streamList->streams[i];
2294         LOGH("newStream type = %d, stream format = %d "
2295                 "stream size : %d x %d, stream rotation = %d",
2296                  newStream->stream_type, newStream->format,
2297                 newStream->width, newStream->height, newStream->rotation);
2298         //if the stream is in the mStreamList validate it
2299         bool stream_exists = false;
2300         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2301                 it != mStreamInfo.end(); it++) {
2302             if ((*it)->stream == newStream) {
2303                 QCamera3ProcessingChannel *channel =
2304                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
2305                 stream_exists = true;
2306                 if (channel)
2307                     delete channel;
2308                 (*it)->status = VALID;
2309                 (*it)->stream->priv = NULL;
2310                 (*it)->channel = NULL;
2311             }
2312         }
2313         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2314             //new stream
2315             stream_info_t* stream_info;
2316             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2317             if (!stream_info) {
2318                LOGE("Could not allocate stream info");
2319                rc = -ENOMEM;
2320                pthread_mutex_unlock(&mMutex);
2321                return rc;
2322             }
2323             stream_info->stream = newStream;
2324             stream_info->status = VALID;
2325             stream_info->channel = NULL;
2326             stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
2327             mStreamInfo.push_back(stream_info);
2328         }
2329         /* Covers Opaque ZSL and API1 F/W ZSL */
2330         if (IS_USAGE_ZSL(newStream->usage)
2331                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2332             if (zslStream != NULL) {
2333                 LOGE("Multiple input/reprocess streams requested!");
2334                 pthread_mutex_unlock(&mMutex);
2335                 return BAD_VALUE;
2336             }
2337             zslStream = newStream;
2338         }
2339         /* Covers YUV reprocess */
2340         if (streamStatus.inputStream != NULL) {
2341             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2342                     && ((newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2343                          && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888)
2344                         || (newStream->format == HAL_PIXEL_FORMAT_Y8
2345                          && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_Y8))
2346                     && streamStatus.inputStream->width == newStream->width
2347                     && streamStatus.inputStream->height == newStream->height) {
2348                 if (zslStream != NULL) {
2349                     /* This scenario indicates multiple YUV streams with same size
2350                      * as input stream have been requested, since zsl stream handle
2351                      * is solely use for the purpose of overriding the size of streams
2352                      * which share h/w streams we will just make a guess here as to
2353                      * which of the stream is a ZSL stream, this will be refactored
2354                      * once we make generic logic for streams sharing encoder output
2355                      */
2356                     LOGH("Warning, Multiple ip/reprocess streams requested!");
2357                 }
2358                 zslStream = newStream;
2359             }
2360         }
2361     }
2362 
2363     /* If a zsl stream is set, we know that we have configured at least one input or
2364        bidirectional stream */
2365     if (NULL != zslStream) {
2366         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2367         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2368         mInputStreamInfo.format = zslStream->format;
2369         mInputStreamInfo.usage = zslStream->usage;
2370         LOGD("Input stream configured! %d x %d, format %d, usage %d",
2371                  mInputStreamInfo.dim.width,
2372                 mInputStreamInfo.dim.height,
2373                 mInputStreamInfo.format, mInputStreamInfo.usage);
2374     }
2375 
2376     cleanAndSortStreamInfo();
2377     if (mMetadataChannel) {
2378         delete mMetadataChannel;
2379         mMetadataChannel = NULL;
2380     }
2381     if (mSupportChannel) {
2382         delete mSupportChannel;
2383         mSupportChannel = NULL;
2384     }
2385 
2386     if (mAnalysisChannel) {
2387         delete mAnalysisChannel;
2388         mAnalysisChannel = NULL;
2389     }
2390 
2391     if (mDummyBatchChannel) {
2392         delete mDummyBatchChannel;
2393         mDummyBatchChannel = NULL;
2394     }
2395 
2396     if (mDepthChannel) {
2397         mDepthChannel = NULL;
2398     }
2399     mDepthCloudMode = CAM_PD_DATA_SKIP;
2400 
2401     mShutterDispatcher.clear();
2402     mOutputBufferDispatcher.clear();
2403 
2404     char is_type_value[PROPERTY_VALUE_MAX];
2405     property_get("persist.camera.is_type", is_type_value, "4");
2406     m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2407 
2408     char property_value[PROPERTY_VALUE_MAX];
2409     property_get("persist.camera.gzoom.at", property_value, "0");
2410     int goog_zoom_at = atoi(property_value);
2411     bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2412         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2413     bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2414         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2415 
2416     property_get("persist.camera.gzoom.4k", property_value, "0");
2417     bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2418 
2419     //Create metadata channel and initialize it
2420     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2421     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2422             gCamCapability[mCameraId]->color_arrangement);
2423     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2424                     mChannelHandle, mCameraHandle->ops, captureResultCb,
2425                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2426     if (mMetadataChannel == NULL) {
2427         LOGE("failed to allocate metadata channel");
2428         rc = -ENOMEM;
2429         pthread_mutex_unlock(&mMutex);
2430         return rc;
2431     }
2432     mMetadataChannel->enableDepthData(streamStatus.depthPresent);
2433     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2434     if (rc < 0) {
2435         LOGE("metadata channel initialization failed");
2436         delete mMetadataChannel;
2437         mMetadataChannel = NULL;
2438         pthread_mutex_unlock(&mMutex);
2439         return rc;
2440     }
2441 
2442     cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2443     bool isRawStreamRequested = false;
2444     bool onlyRaw = true;
2445     // Keep track of preview/video streams indices.
2446     // There could be more than one preview streams, but only one video stream.
2447     int32_t video_stream_idx = -1;
2448     int32_t preview_stream_idx[streamList->num_streams];
2449     size_t preview_stream_cnt = 0;
2450     bool previewTnr[streamList->num_streams];
2451     memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2452     bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2453     // Loop through once to determine preview TNR conditions before creating channels.
2454     for (size_t i = 0; i < streamList->num_streams; i++) {
2455         camera3_stream_t *newStream = streamList->streams[i];
2456         uint32_t stream_usage = newStream->usage;
2457         if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2458                 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2459             if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2460                 video_stream_idx = (int32_t)i;
2461             else
2462                 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2463         }
2464     }
2465     // By default, preview stream TNR is disabled.
2466     // Enable TNR to the preview stream if all conditions below are satisfied:
2467     //  1. preview resolution == video resolution.
2468     //  2. video stream TNR is enabled.
2469     //  3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2470     for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2471         camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2472         camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2473         if (m_bTnrEnabled && m_bTnrVideo &&
2474                 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2475                 video_stream->width == preview_stream->width &&
2476                 video_stream->height == preview_stream->height) {
2477             previewTnr[preview_stream_idx[i]] = true;
2478         }
2479     }
2480 
2481     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2482     /* Allocate channel objects for the requested streams */
2483     for (size_t i = 0; i < streamList->num_streams; i++) {
2484 
2485         camera3_stream_t *newStream = streamList->streams[i];
2486         uint32_t stream_usage = newStream->usage;
2487         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2488         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2489         struct camera_info *p_info = NULL;
2490         pthread_mutex_lock(&gCamLock);
2491         p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2492         pthread_mutex_unlock(&gCamLock);
2493         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2494                 || IS_USAGE_ZSL(newStream->usage)) &&
2495             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2496             onlyRaw = false; // There is non-raw stream - bypass flag if set
2497             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2498             if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width, newStream->height)) {
2499                 if (streamStatus.bUseCommonFeatureMask)
2500                     zsl_ppmask = streamStatus.commonFeatureMask;
2501                 else
2502                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2503             } else {
2504                 if (streamStatus.numStreamsOnEncoder > 0)
2505                     zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506                 else
2507                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2508             }
2509             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2510         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2511             onlyRaw = false; // There is non-raw stream - bypass flag if set
2512                 LOGH("Input stream configured, reprocess config");
2513         } else {
2514             //for non zsl streams find out the format
2515             switch (newStream->format) {
2516             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2517             {
2518                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2519                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2520                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2521                 /* add additional features to pp feature mask */
2522                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2523                         mStreamConfigInfo.num_streams);
2524 
2525                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2526                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2527                                 CAM_STREAM_TYPE_VIDEO;
2528                     if (m_bTnrEnabled && m_bTnrVideo) {
2529                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2530                             CAM_QCOM_FEATURE_CPP_TNR;
2531                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2532                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2533                                 ~CAM_QCOM_FEATURE_CDS;
2534                     }
2535                     if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2536                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2537                             CAM_QTI_FEATURE_PPEISCORE;
2538                     }
2539                     if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2540                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2541                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2542                     }
2543                 } else {
2544                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2545                             CAM_STREAM_TYPE_PREVIEW;
2546                     if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
2547                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2548                                 CAM_QCOM_FEATURE_CPP_TNR;
2549                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2550                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2551                                 ~CAM_QCOM_FEATURE_CDS;
2552                     }
2553                     if(!m_bSwTnrPreview) {
2554                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2555                                 ~CAM_QTI_FEATURE_SW_TNR;
2556                     }
2557                     if (is_goog_zoom_preview_enabled) {
2558                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2559                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2560                     }
2561                     padding_info.width_padding = mSurfaceStridePadding;
2562                     padding_info.height_padding = CAM_PAD_TO_2;
2563                     previewSize.width = (int32_t)newStream->width;
2564                     previewSize.height = (int32_t)newStream->height;
2565                 }
2566                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2567                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2568                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2569                             newStream->height;
2570                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2571                             newStream->width;
2572                 }
2573             }
2574             break;
2575             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2576             case HAL_PIXEL_FORMAT_Y8:
2577                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2578                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2579                 if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2580                             newStream->height)) {
2581                     if (streamStatus.bUseCommonFeatureMask)
2582                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2583                                 streamStatus.commonFeatureMask;
2584                     else
2585                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2586                                 CAM_QCOM_FEATURE_NONE;
2587                 } else {
2588                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2589                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2590                 }
2591             break;
2592             case HAL_PIXEL_FORMAT_BLOB:
2593                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2594                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2595                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2596                 if ((m_bIs4KVideo && !streamStatus.isZsl) ||
2597                         (streamStatus.bSmallJpegSize && !streamStatus.isZsl)) {
2598                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2599                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2600                      /* Remove rotation if it is not supported
2601                         for 4K LiveVideo snapshot case (online processing) */
2602                      if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2603                                 CAM_QCOM_FEATURE_ROTATION)) {
2604                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2605                                  &= ~CAM_QCOM_FEATURE_ROTATION;
2606                      }
2607                 } else {
2608                     if (streamStatus.bUseCommonFeatureMask &&
2609                             isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2610                             newStream->height)) {
2611                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2612                                 streamStatus.commonFeatureMask;
2613                     } else {
2614                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2615                     }
2616                 }
2617                 if (streamStatus.isZsl) {
2618                     if (zslStream) {
2619                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2620                                 (int32_t)zslStream->width;
2621                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2622                                 (int32_t)zslStream->height;
2623                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2624                                 zsl_ppmask;
2625                     } else {
2626                         LOGE("Error, No ZSL stream identified");
2627                         pthread_mutex_unlock(&mMutex);
2628                         return -EINVAL;
2629                     }
2630                 } else if (m_bIs4KVideo) {
2631                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2632                             (int32_t) streamStatus.videoWidth;
2633                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2634                             (int32_t) streamStatus.videoHeight;
2635                 } else if (streamStatus.bYuv888OverrideJpeg) {
2636                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2637                             (int32_t) streamStatus.largeYuv888Size.width;
2638                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2639                             (int32_t) streamStatus.largeYuv888Size.height;
2640                 }
2641                 break;
2642             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2643             case HAL_PIXEL_FORMAT_RAW16:
2644             case HAL_PIXEL_FORMAT_RAW10:
2645                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2646                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2647                 isRawStreamRequested = true;
2648                 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2649                         (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2650                     mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2651                             gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2652                     mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2653                             gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2654                     mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2655                             gCamCapability[mCameraId]->dt[mPDIndex];
2656                     mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2657                             gCamCapability[mCameraId]->vc[mPDIndex];
2658                 }
2659                 break;
2660             default:
2661                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2662                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2663                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2664                 break;
2665             }
2666         }
2667 
2668         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2669                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2670                 gCamCapability[mCameraId]->color_arrangement);
2671 
2672         if (newStream->priv == NULL) {
2673             //New stream, construct channel
2674             switch (newStream->stream_type) {
2675             case CAMERA3_STREAM_INPUT:
2676                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2677                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2678                 break;
2679             case CAMERA3_STREAM_BIDIRECTIONAL:
2680                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2681                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2682                 break;
2683             case CAMERA3_STREAM_OUTPUT:
2684                 /* For video encoding stream, set read/write rarely
2685                  * flag so that they may be set to un-cached */
2686                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2687                     newStream->usage |=
2688                          (GRALLOC_USAGE_SW_READ_RARELY |
2689                          GRALLOC_USAGE_SW_WRITE_RARELY |
2690                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2691                 else if (IS_USAGE_ZSL(newStream->usage))
2692                 {
2693                     LOGD("ZSL usage flag skipping");
2694                 }
2695                 else if (newStream == zslStream
2696                         || (newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
2697                             newStream->format == HAL_PIXEL_FORMAT_Y8)) {
2698                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2699                 } else
2700                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2701                 break;
2702             default:
2703                 LOGE("Invalid stream_type %d", newStream->stream_type);
2704                 break;
2705             }
2706 
2707             bool forcePreviewUBWC = true;
2708             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2709                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2710                 QCamera3ProcessingChannel *channel = NULL;
2711                 switch (newStream->format) {
2712                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2713                     if ((newStream->usage &
2714                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2715                             (streamList->operation_mode ==
2716                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2717                     ) {
2718                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2719                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2720                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2721                                 this,
2722                                 newStream,
2723                                 (cam_stream_type_t)
2724                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2725                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2726                                 mMetadataChannel,
2727                                 0); //heap buffers are not required for HFR video channel
2728                         if (channel == NULL) {
2729                             LOGE("allocation of channel failed");
2730                             pthread_mutex_unlock(&mMutex);
2731                             return -ENOMEM;
2732                         }
2733                         //channel->getNumBuffers() will return 0 here so use
2734                         //MAX_INFLIGH_HFR_REQUESTS
2735                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2736                         newStream->priv = channel;
2737                         LOGI("num video buffers in HFR mode: %d",
2738                                  MAX_INFLIGHT_HFR_REQUESTS);
2739                     } else {
2740                         /* Copy stream contents in HFR preview only case to create
2741                          * dummy batch channel so that sensor streaming is in
2742                          * HFR mode */
2743                         if (!m_bIsVideo && (streamList->operation_mode ==
2744                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2745                             mDummyBatchStream = *newStream;
2746                             mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
2747                         }
2748                         int bufferCount = MAX_INFLIGHT_REQUESTS;
2749                         if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2750                                 CAM_STREAM_TYPE_VIDEO) {
2751                             if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2752                                 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2753                                 bufferCount = m_bIs4KVideo ?
2754                                     MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2755                             }
2756 
2757                         }
2758                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2759                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2760                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2761                                 this,
2762                                 newStream,
2763                                 (cam_stream_type_t)
2764                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2765                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766                                 mMetadataChannel,
2767                                 bufferCount);
2768                         if (channel == NULL) {
2769                             LOGE("allocation of channel failed");
2770                             pthread_mutex_unlock(&mMutex);
2771                             return -ENOMEM;
2772                         }
2773                         /* disable UBWC for preview, though supported,
2774                          * to take advantage of CPP duplication */
2775                         if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2776                                 (previewSize.width == (int32_t) streamStatus.videoWidth)&&
2777                                 (previewSize.height == (int32_t) streamStatus.videoHeight)){
2778                             forcePreviewUBWC = false;
2779                         }
2780                         channel->setUBWCEnabled(forcePreviewUBWC);
2781                          /* When goog_zoom is linked to the preview or video stream,
2782                           * disable ubwc to the linked stream */
2783                         if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2784                                 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2785                             channel->setUBWCEnabled(false);
2786                         }
2787                         newStream->max_buffers = channel->getNumBuffers();
2788                         newStream->priv = channel;
2789                     }
2790                     break;
2791                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2792                 case HAL_PIXEL_FORMAT_Y8: {
2793                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2794                             mChannelHandle,
2795                             mCameraHandle->ops, captureResultCb,
2796                             setBufferErrorStatus, &padding_info,
2797                             this,
2798                             newStream,
2799                             (cam_stream_type_t)
2800                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2801                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2802                             mMetadataChannel);
2803                     if (channel == NULL) {
2804                         LOGE("allocation of YUV channel failed");
2805                         pthread_mutex_unlock(&mMutex);
2806                         return -ENOMEM;
2807                     }
2808                     newStream->max_buffers = channel->getNumBuffers();
2809                     newStream->priv = channel;
2810                     break;
2811                 }
2812                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2813                 case HAL_PIXEL_FORMAT_RAW16:
2814                 case HAL_PIXEL_FORMAT_RAW10: {
2815                     bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2816                             (HAL_DATASPACE_DEPTH != newStream->data_space))
2817                             ? true : false;
2818                     mRawChannel = new QCamera3RawChannel(
2819                             mCameraHandle->camera_handle, mChannelHandle,
2820                             mCameraHandle->ops, captureResultCb,
2821                             setBufferErrorStatus, &padding_info,
2822                             this, newStream,
2823                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2824                             mMetadataChannel, isRAW16);
2825                     if (mRawChannel == NULL) {
2826                         LOGE("allocation of raw channel failed");
2827                         pthread_mutex_unlock(&mMutex);
2828                         return -ENOMEM;
2829                     }
2830                     newStream->max_buffers = mRawChannel->getNumBuffers();
2831                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2832                     break;
2833                 }
2834                 case HAL_PIXEL_FORMAT_BLOB:
2835                     if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2836                         mDepthChannel = new QCamera3DepthChannel(
2837                                 mCameraHandle->camera_handle, mChannelHandle,
2838                                 mCameraHandle->ops, NULL, NULL, &padding_info,
2839                                 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2840                                 mMetadataChannel);
2841                         if (NULL == mDepthChannel) {
2842                             LOGE("Allocation of depth channel failed");
2843                             pthread_mutex_unlock(&mMutex);
2844                             return NO_MEMORY;
2845                         }
2846                         newStream->priv = mDepthChannel;
2847                         newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2848                     } else {
2849                         // Max live snapshot inflight buffer is 1. This is to mitigate
2850                         // frame drop issues for video snapshot. The more buffers being
2851                         // allocated, the more frame drops there are.
2852                         mPictureChannel = new QCamera3PicChannel(
2853                                 mCameraHandle->camera_handle, mChannelHandle,
2854                                 mCameraHandle->ops, captureResultCb,
2855                                 setBufferErrorStatus, &padding_info, this, newStream,
2856                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2857                                 m_bIs4KVideo, streamStatus.isZsl, streamStatus.bY80OnEncoder,
2858                                 mMetadataChannel, (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2859                         if (mPictureChannel == NULL) {
2860                             LOGE("allocation of channel failed");
2861                             pthread_mutex_unlock(&mMutex);
2862                             return -ENOMEM;
2863                         }
2864                         newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2865                         newStream->max_buffers = mPictureChannel->getNumBuffers();
2866                         mPictureChannel->overrideYuvSize(
2867                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2868                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2869                     }
2870                     break;
2871 
2872                 default:
2873                     LOGE("not a supported format 0x%x", newStream->format);
2874                     pthread_mutex_unlock(&mMutex);
2875                     return -EINVAL;
2876                 }
2877             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2878                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2879             } else {
2880                 LOGE("Error, Unknown stream type");
2881                 pthread_mutex_unlock(&mMutex);
2882                 return -EINVAL;
2883             }
2884 
2885             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2886             if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2887                 // Here we only care whether it's EIS3 or not
2888                 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2889                 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2890                         mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2891                     isType = IS_TYPE_NONE;
2892                 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2893                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2894                         newStream->width, newStream->height, forcePreviewUBWC, isType);
2895                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2896                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2897                 }
2898             }
2899 
2900             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2901                     it != mStreamInfo.end(); it++) {
2902                 if ((*it)->stream == newStream) {
2903                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2904                     break;
2905                 }
2906             }
2907         } else {
2908             // Channel already exists for this stream
2909             // Do nothing for now
2910         }
2911         padding_info = gCamCapability[mCameraId]->padding_info;
2912 
2913         /* Do not add entries for input&depth stream in metastream info
2914          * since there is no real stream associated with it
2915          */
2916         if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2917                 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2918                         (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2919             mStreamConfigInfo.num_streams++;
2920         }
2921     }
2922 
2923     // Let buffer dispatcher know the configured streams.
2924     mOutputBufferDispatcher.configureStreams(streamList);
2925 
2926     if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2927         onlyRaw = false;
2928     }
2929 
2930     // Create analysis stream all the time, even when h/w support is not available
2931     if (!onlyRaw) {
2932         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2933         cam_analysis_info_t analysisInfo;
2934         int32_t ret = NO_ERROR;
2935         ret = mCommon.getAnalysisInfo(
2936                 FALSE,
2937                 analysisFeatureMask,
2938                 &analysisInfo);
2939         if (ret == NO_ERROR) {
2940             cam_color_filter_arrangement_t analysis_color_arrangement =
2941                     (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2942                     CAM_FILTER_ARRANGEMENT_Y :
2943                     gCamCapability[mCameraId]->color_arrangement);
2944             setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2945                                                analysis_color_arrangement);
2946             cam_dimension_t analysisDim;
2947             analysisDim = mCommon.getMatchingDimension(previewSize,
2948                     analysisInfo.analysis_recommended_res);
2949 
2950             mAnalysisChannel = new QCamera3SupportChannel(
2951                     mCameraHandle->camera_handle,
2952                     mChannelHandle,
2953                     mCameraHandle->ops,
2954                     &analysisInfo.analysis_padding_info,
2955                     analysisFeatureMask,
2956                     CAM_STREAM_TYPE_ANALYSIS,
2957                     &analysisDim,
2958                     (analysisInfo.analysis_format
2959                     == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2960                     : CAM_FORMAT_YUV_420_NV21),
2961                     analysisInfo.hw_analysis_supported,
2962                     gCamCapability[mCameraId]->color_arrangement,
2963                     this,
2964                     0); // force buffer count to 0
2965         } else {
2966             LOGW("getAnalysisInfo failed, ret = %d", ret);
2967         }
2968         if (!mAnalysisChannel) {
2969             LOGW("Analysis channel cannot be created");
2970         }
2971     }
2972 
2973     //RAW DUMP channel
2974     if (mEnableRawDump && isRawStreamRequested == false){
2975         cam_dimension_t rawDumpSize;
2976         rawDumpSize = getMaxRawSize(mCameraId);
2977         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2978         setPAAFSupport(rawDumpFeatureMask,
2979                 CAM_STREAM_TYPE_RAW,
2980                 gCamCapability[mCameraId]->color_arrangement);
2981         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2982                                   mChannelHandle,
2983                                   mCameraHandle->ops,
2984                                   rawDumpSize,
2985                                   &padding_info,
2986                                   this, rawDumpFeatureMask);
2987         if (!mRawDumpChannel) {
2988             LOGE("Raw Dump channel cannot be created");
2989             pthread_mutex_unlock(&mMutex);
2990             return -ENOMEM;
2991         }
2992     }
2993 
2994     if (mAnalysisChannel) {
2995         cam_analysis_info_t analysisInfo;
2996         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2997         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2998                 CAM_STREAM_TYPE_ANALYSIS;
2999         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3000                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3001         rc = mCommon.getAnalysisInfo(FALSE,
3002                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3003                 &analysisInfo);
3004         if (rc != NO_ERROR) {
3005             LOGE("getAnalysisInfo failed, ret = %d", rc);
3006             pthread_mutex_unlock(&mMutex);
3007             return rc;
3008         }
3009         cam_color_filter_arrangement_t analysis_color_arrangement =
3010                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
3011                 CAM_FILTER_ARRANGEMENT_Y :
3012                 gCamCapability[mCameraId]->color_arrangement);
3013         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3014                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3015                 analysis_color_arrangement);
3016 
3017         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3018                 mCommon.getMatchingDimension(previewSize,
3019                 analysisInfo.analysis_recommended_res);
3020         mStreamConfigInfo.num_streams++;
3021     }
3022 
3023     if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
3024         cam_analysis_info_t supportInfo;
3025         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
3026         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3027         setPAAFSupport(callbackFeatureMask,
3028                 CAM_STREAM_TYPE_CALLBACK,
3029                 gCamCapability[mCameraId]->color_arrangement);
3030         int32_t ret = NO_ERROR;
3031         ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
3032         if (ret != NO_ERROR) {
3033             /* Ignore the error for Mono camera
3034              * because the PAAF bit mask is only set
3035              * for CAM_STREAM_TYPE_ANALYSIS stream type
3036              */
3037             if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
3038                 LOGW("getAnalysisInfo failed, ret = %d", ret);
3039             }
3040         }
3041         mSupportChannel = new QCamera3SupportChannel(
3042                 mCameraHandle->camera_handle,
3043                 mChannelHandle,
3044                 mCameraHandle->ops,
3045                 &gCamCapability[mCameraId]->padding_info,
3046                 callbackFeatureMask,
3047                 CAM_STREAM_TYPE_CALLBACK,
3048                 &QCamera3SupportChannel::kDim,
3049                 CAM_FORMAT_YUV_420_NV21,
3050                 supportInfo.hw_analysis_supported,
3051                 gCamCapability[mCameraId]->color_arrangement,
3052                 this, 0);
3053         if (!mSupportChannel) {
3054             LOGE("dummy channel cannot be created");
3055             pthread_mutex_unlock(&mMutex);
3056             return -ENOMEM;
3057         }
3058     }
3059 
3060     if (mSupportChannel) {
3061         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3062                 QCamera3SupportChannel::kDim;
3063         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3064                 CAM_STREAM_TYPE_CALLBACK;
3065         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3066                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3067         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3068                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3069                 gCamCapability[mCameraId]->color_arrangement);
3070         mStreamConfigInfo.num_streams++;
3071     }
3072 
3073     if (mRawDumpChannel) {
3074         cam_dimension_t rawSize;
3075         rawSize = getMaxRawSize(mCameraId);
3076         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3077                 rawSize;
3078         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3079                 CAM_STREAM_TYPE_RAW;
3080         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3081                 CAM_QCOM_FEATURE_NONE;
3082         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3083                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3084                 gCamCapability[mCameraId]->color_arrangement);
3085         mStreamConfigInfo.num_streams++;
3086     }
3087 
3088     if (mHdrPlusRawSrcChannel) {
3089         cam_dimension_t rawSize;
3090         rawSize = getMaxRawSize(mCameraId);
3091         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3092         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3093         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3094         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3095                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3096                 gCamCapability[mCameraId]->color_arrangement);
3097         mStreamConfigInfo.num_streams++;
3098     }
3099 
3100     /* In HFR mode, if video stream is not added, create a dummy channel so that
3101      * ISP can create a batch mode even for preview only case. This channel is
3102      * never 'start'ed (no stream-on), it is only 'initialized'  */
3103     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3104             !m_bIsVideo) {
3105         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3106         setPAAFSupport(dummyFeatureMask,
3107                 CAM_STREAM_TYPE_VIDEO,
3108                 gCamCapability[mCameraId]->color_arrangement);
3109         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3110                 mChannelHandle,
3111                 mCameraHandle->ops, captureResultCb,
3112                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
3113                 this,
3114                 &mDummyBatchStream,
3115                 CAM_STREAM_TYPE_VIDEO,
3116                 dummyFeatureMask,
3117                 mMetadataChannel);
3118         if (NULL == mDummyBatchChannel) {
3119             LOGE("creation of mDummyBatchChannel failed."
3120                     "Preview will use non-hfr sensor mode ");
3121         }
3122     }
3123     if (mDummyBatchChannel) {
3124         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3125                 mDummyBatchStream.width;
3126         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3127                 mDummyBatchStream.height;
3128         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3129                 CAM_STREAM_TYPE_VIDEO;
3130         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3131                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3132         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3133                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3134                 gCamCapability[mCameraId]->color_arrangement);
3135         mStreamConfigInfo.num_streams++;
3136     }
3137 
3138     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3139     mStreamConfigInfo.buffer_info.max_buffers =
3140             m_bIs4KVideo ? 0 :
3141             m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3142 
3143     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3144     for (pendingRequestIterator i = mPendingRequestsList.begin();
3145             i != mPendingRequestsList.end();) {
3146         i = erasePendingRequest(i);
3147     }
3148     mPendingFrameDropList.clear();
3149     // Initialize/Reset the pending buffers list
3150     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3151         req.mPendingBufferList.clear();
3152     }
3153     mPendingBuffersMap.mPendingBuffersInRequest.clear();
3154     mExpectedInflightDuration = 0;
3155     mExpectedFrameDuration = 0;
3156 
3157     mCurJpegMeta.clear();
3158     //Get min frame duration for this streams configuration
3159     deriveMinFrameDuration();
3160 
3161     mFirstPreviewIntentSeen = false;
3162 
3163     // Update state
3164     mState = CONFIGURED;
3165 
3166     mFirstMetadataCallback = true;
3167 
3168     memset(&mLastEISCropInfo, 0, sizeof(mLastEISCropInfo));
3169 
3170     if (streamList->session_parameters != nullptr) {
3171         CameraMetadata meta;
3172         meta = streamList->session_parameters;
3173 
3174         // send an unconfigure to the backend so that the isp
3175         // resources are deallocated
3176         if (!mFirstConfiguration) {
3177             cam_stream_size_info_t stream_config_info;
3178             int32_t hal_version = CAM_HAL_V3;
3179             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3180             stream_config_info.buffer_info.min_buffers =
3181                     MIN_INFLIGHT_REQUESTS;
3182             stream_config_info.buffer_info.max_buffers =
3183                     m_bIs4KVideo ? 0 :
3184                     m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3185             clear_metadata_buffer(mParameters);
3186             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3187                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3188             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3189                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3190             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3191                     mParameters);
3192             if (rc < 0) {
3193                 LOGE("set_parms for unconfigure failed");
3194                 pthread_mutex_unlock(&mMutex);
3195                 return rc;
3196             }
3197 
3198         }
3199         /* get eis information for stream configuration */
3200         cam_is_type_t isTypePreview, is_type=IS_TYPE_NONE;
3201         char is_type_value[PROPERTY_VALUE_MAX];
3202         property_get("persist.camera.is_type", is_type_value, "4");
3203         m_ISTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
3204         // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
3205         property_get("persist.camera.is_type_preview", is_type_value, "4");
3206         isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
3207         LOGD("isTypeVideo: %d isTypePreview: %d", m_ISTypeVideo, isTypePreview);
3208 
3209         int32_t hal_version = CAM_HAL_V3;
3210         clear_metadata_buffer(mParameters);
3211         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3212         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);
3213 
3214         if (mFirstConfiguration) {
3215             // configure instant AEC
3216             // Instant AEC is a session based parameter and it is needed only
3217             // once per complete session after open camera.
3218             // i.e. This is set only once for the first capture request, after open camera.
3219             setInstantAEC(meta);
3220         }
3221 
3222         bool setEis = isEISEnabled(meta);
3223         int32_t vsMode;
3224         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3225         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3226             rc = BAD_VALUE;
3227         }
3228         LOGD("setEis %d", setEis);
3229         bool eis3Supported = false;
3230         size_t count = IS_TYPE_MAX;
3231         count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
3232         for (size_t i = 0; i < count; i++) {
3233             if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
3234                 eis3Supported = true;
3235                 break;
3236             }
3237         }
3238 
3239         //IS type will be 0 unless EIS is supported. If EIS is supported
3240         //it could either be 4 or 5 depending on the stream and video size
3241         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3242             if (setEis) {
3243                 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
3244                     is_type = isTypePreview;
3245                 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
3246                     if ( (m_ISTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
3247                         LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
3248                         is_type = IS_TYPE_EIS_2_0;
3249                     } else {
3250                         is_type = m_ISTypeVideo;
3251                     }
3252                 } else {
3253                     is_type = IS_TYPE_NONE;
3254                 }
3255                  mStreamConfigInfo.is_type[i] = is_type;
3256             } else {
3257                  mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
3258             }
3259         }
3260 
3261         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3262                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3263 
3264         char prop[PROPERTY_VALUE_MAX];
3265         //Disable tintless only if the property is set to 0
3266         memset(prop, 0, sizeof(prop));
3267         property_get("persist.camera.tintless.enable", prop, "1");
3268         int32_t tintless_value = atoi(prop);
3269 
3270         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3271                 CAM_INTF_PARM_TINTLESS, tintless_value);
3272 
3273         //Disable CDS for HFR mode or if DIS/EIS is on.
3274         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3275         //after every configure_stream
3276         if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3277                 (m_bIsVideo)) {
3278             int32_t cds = CAM_CDS_MODE_OFF;
3279             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3280                     CAM_INTF_PARM_CDS_MODE, cds))
3281                 LOGE("Failed to disable CDS for HFR mode");
3282 
3283         }
3284 
3285         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3286             uint8_t* use_av_timer = NULL;
3287 
3288             if (m_debug_avtimer){
3289                 LOGI(" Enabling AV timer through setprop");
3290                 use_av_timer = &m_debug_avtimer;
3291                 m_bAVTimerEnabled = true;
3292             }
3293             else{
3294                 use_av_timer =
3295                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3296                 if (use_av_timer) {
3297                     m_bAVTimerEnabled = true;
3298                     LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
3299                 }
3300             }
3301 
3302             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3303                 rc = BAD_VALUE;
3304             }
3305         }
3306 
3307         setMobicat();
3308 
3309         /* Set fps and hfr mode while sending meta stream info so that sensor
3310          * can configure appropriate streaming mode */
3311         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3312         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3313         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3314         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3315             rc = setHalFpsRange(meta, mParameters);
3316             if (rc == NO_ERROR) {
3317                 int32_t max_fps =
3318                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3319                 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3320                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3321                 }
3322                 /* For HFR, more buffers are dequeued upfront to improve the performance */
3323                 if (mBatchSize) {
3324                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3325                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3326                 }
3327             }
3328             else {
3329                 LOGE("setHalFpsRange failed");
3330             }
3331         }
3332         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3333 
3334         if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
3335             cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
3336                     meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
3337             rc = setVideoHdrMode(mParameters, vhdr);
3338             if (rc != NO_ERROR) {
3339                 LOGE("setVideoHDR is failed");
3340             }
3341         }
3342 
3343         if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
3344             uint8_t sensorModeFullFov =
3345                     meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
3346             LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
3347             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
3348                     sensorModeFullFov)) {
3349                 rc = BAD_VALUE;
3350             }
3351         }
3352         //TODO: validate the arguments, HSV scenemode should have only the
3353         //advertised fps ranges
3354 
3355         /*set the capture intent, hal version, tintless, stream info,
3356          *and DIS enable parameters to the backend*/
3357         LOGD("set_parms META_STREAM_INFO " );
3358         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3359             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
3360                     ", Format:%d is_type: %d",
3361                     mStreamConfigInfo.type[i],
3362                     mStreamConfigInfo.stream_sizes[i].width,
3363                     mStreamConfigInfo.stream_sizes[i].height,
3364                     mStreamConfigInfo.postprocess_mask[i],
3365                     mStreamConfigInfo.format[i],
3366                     mStreamConfigInfo.is_type[i]);
3367         }
3368 
3369         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3370                     mParameters);
3371         if (rc < 0) {
3372             LOGE("set_parms failed for hal version, stream info");
3373         }
3374 
3375     }
3376 
3377     pthread_mutex_unlock(&mMutex);
3378 
3379     return rc;
3380 }
3381 
3382 /*===========================================================================
3383  * FUNCTION   : isEISEnabled
3384  *
3385  * DESCRIPTION: Decide whether EIS should get enabled or not.
3386  *
3387  * PARAMETERS :
3388  *   @meta : request from framework to process
3389  *
3390  * RETURN     : true/false Whether EIS should be enabled
3391  *
3392  *==========================================================================*/
isEISEnabled(const CameraMetadata & meta)3393 bool QCamera3HardwareInterface::isEISEnabled(const CameraMetadata& meta) {
3394     uint8_t fwkVideoStabMode = 0;
3395     if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
3396         fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
3397     }
3398 
3399     // If EIS setprop is enabled then only turn it on for video/preview
3400     return  m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
3401         (m_ISTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
3402 }
3403 
3404 /*===========================================================================
3405  * FUNCTION   : validateCaptureRequest
3406  *
3407  * DESCRIPTION: validate a capture request from camera service
3408  *
3409  * PARAMETERS :
3410  *   @request : request from framework to process
3411  *
3412  * RETURN     :
3413  *
3414  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)3415 int QCamera3HardwareInterface::validateCaptureRequest(
3416                     camera3_capture_request_t *request,
3417                     List<InternalRequest> &internallyRequestedStreams)
3418 {
3419     ssize_t idx = 0;
3420     const camera3_stream_buffer_t *b;
3421     CameraMetadata meta;
3422 
3423     /* Sanity check the request */
3424     if (request == NULL) {
3425         LOGE("NULL capture request");
3426         return BAD_VALUE;
3427     }
3428 
3429     if ((request->settings == NULL) && (mState == CONFIGURED)) {
3430         /*settings cannot be null for the first request*/
3431         return BAD_VALUE;
3432     }
3433 
3434     uint32_t frameNumber = request->frame_number;
3435     if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3436             && (internallyRequestedStreams.size() == 0)) {
3437         LOGE("Request %d: No output buffers provided!",
3438                 __FUNCTION__, frameNumber);
3439         return BAD_VALUE;
3440     }
3441     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3442         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3443                  request->num_output_buffers, MAX_NUM_STREAMS);
3444         return BAD_VALUE;
3445     }
3446     if (request->input_buffer != NULL) {
3447         b = request->input_buffer;
3448         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3449             LOGE("Request %d: Buffer %ld: Status not OK!",
3450                      frameNumber, (long)idx);
3451             return BAD_VALUE;
3452         }
3453         if (b->release_fence != -1) {
3454             LOGE("Request %d: Buffer %ld: Has a release fence!",
3455                      frameNumber, (long)idx);
3456             return BAD_VALUE;
3457         }
3458         if (b->buffer == NULL) {
3459             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3460                      frameNumber, (long)idx);
3461             return BAD_VALUE;
3462         }
3463     }
3464 
3465     // Validate all buffers
3466     b = request->output_buffers;
3467     if (b == NULL) {
3468        return BAD_VALUE;
3469     }
3470     while (idx < (ssize_t)request->num_output_buffers) {
3471         QCamera3ProcessingChannel *channel =
3472                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3473         if (channel == NULL) {
3474             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3475                      frameNumber, (long)idx);
3476             return BAD_VALUE;
3477         }
3478         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3479             LOGE("Request %d: Buffer %ld: Status not OK!",
3480                      frameNumber, (long)idx);
3481             return BAD_VALUE;
3482         }
3483         if (b->release_fence != -1) {
3484             LOGE("Request %d: Buffer %ld: Has a release fence!",
3485                      frameNumber, (long)idx);
3486             return BAD_VALUE;
3487         }
3488         if (b->buffer == NULL) {
3489             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3490                      frameNumber, (long)idx);
3491             return BAD_VALUE;
3492         }
3493         if (*(b->buffer) == NULL) {
3494             LOGE("Request %d: Buffer %ld: NULL private handle!",
3495                      frameNumber, (long)idx);
3496             return BAD_VALUE;
3497         }
3498         idx++;
3499         b = request->output_buffers + idx;
3500     }
3501     return NO_ERROR;
3502 }
3503 
3504 /*===========================================================================
3505  * FUNCTION   : deriveMinFrameDuration
3506  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3508  *              on currently configured streams.
3509  *
3510  * PARAMETERS : NONE
3511  *
3512  * RETURN     : NONE
3513  *
3514  *==========================================================================*/
deriveMinFrameDuration()3515 void QCamera3HardwareInterface::deriveMinFrameDuration()
3516 {
3517     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3518     bool hasRaw = false;
3519 
3520     mMinRawFrameDuration = 0;
3521     mMinJpegFrameDuration = 0;
3522     mMinProcessedFrameDuration = 0;
3523 
3524     maxJpegDim = 0;
3525     maxProcessedDim = 0;
3526     maxRawDim = 0;
3527 
3528     // Figure out maximum jpeg, processed, and raw dimensions
3529     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3530         it != mStreamInfo.end(); it++) {
3531 
3532         // Input stream doesn't have valid stream_type
3533         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3534             continue;
3535 
3536         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3537         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3538             if (dimension > maxJpegDim)
3539                 maxJpegDim = dimension;
3540         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3541                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3542                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3543             hasRaw = true;
3544             if (dimension > maxRawDim)
3545                 maxRawDim = dimension;
3546         } else {
3547             if (dimension > maxProcessedDim)
3548                 maxProcessedDim = dimension;
3549         }
3550     }
3551 
3552     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3553             MAX_SIZES_CNT);
3554 
3555     //Assume all jpeg dimensions are in processed dimensions.
3556     if (maxJpegDim > maxProcessedDim)
3557         maxProcessedDim = maxJpegDim;
3558     //Find the smallest raw dimension that is greater or equal to jpeg dimension
3559     if (hasRaw && maxProcessedDim > maxRawDim) {
3560         maxRawDim = INT32_MAX;
3561 
3562         for (size_t i = 0; i < count; i++) {
3563             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3564                     gCamCapability[mCameraId]->raw_dim[i].height;
3565             if (dimension >= maxProcessedDim && dimension < maxRawDim)
3566                 maxRawDim = dimension;
3567         }
3568     }
3569 
3570     //Find minimum durations for processed, jpeg, and raw
3571     for (size_t i = 0; i < count; i++) {
3572         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3573                 gCamCapability[mCameraId]->raw_dim[i].height) {
3574             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3575             break;
3576         }
3577     }
3578     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3579     for (size_t i = 0; i < count; i++) {
3580         if (maxProcessedDim ==
3581                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3582                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3583             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3584             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3585             break;
3586         }
3587     }
3588 }
3589 
3590 /*===========================================================================
3591  * FUNCTION   : getMinFrameDuration
3592  *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the framework
 *
 * RETURN     : min frame duration for a particular request
3599  *
3600  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)3601 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3602 {
3603     bool hasJpegStream = false;
3604     bool hasRawStream = false;
3605     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3606         const camera3_stream_t *stream = request->output_buffers[i].stream;
3607         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3608             hasJpegStream = true;
3609         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3610                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3611                 stream->format == HAL_PIXEL_FORMAT_RAW16)
3612             hasRawStream = true;
3613     }
3614 
3615     if (!hasJpegStream)
3616         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3617     else
3618         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3619 }
3620 
3621 /*===========================================================================
3622  * FUNCTION   : handleBuffersDuringFlushLock
3623  *
3624  * DESCRIPTION: Account for buffers returned from back-end during flush
3625  *              This function is executed while mMutex is held by the caller.
3626  *
3627  * PARAMETERS :
3628  *   @buffer: image buffer for the callback
3629  *
3630  * RETURN     :
3631  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)3632 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3633 {
3634     bool buffer_found = false;
3635     for (List<PendingBuffersInRequest>::iterator req =
3636             mPendingBuffersMap.mPendingBuffersInRequest.begin();
3637             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3638         for (List<PendingBufferInfo>::iterator i =
3639                 req->mPendingBufferList.begin();
3640                 i != req->mPendingBufferList.end(); i++) {
3641             if (i->buffer == buffer->buffer) {
3642                 mPendingBuffersMap.numPendingBufsAtFlush--;
3643                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3644                     buffer->buffer, req->frame_number,
3645                     mPendingBuffersMap.numPendingBufsAtFlush);
3646                 buffer_found = true;
3647                 break;
3648             }
3649         }
3650         if (buffer_found) {
3651             break;
3652         }
3653     }
3654     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3655         //signal the flush()
3656         LOGD("All buffers returned to HAL. Continue flush");
3657         pthread_cond_signal(&mBuffersCond);
3658     }
3659 }
3660 
3661 /*===========================================================================
3662  * FUNCTION   : handleBatchMetadata
3663  *
3664  * DESCRIPTION: Handles metadata buffer callback in batch mode
3665  *
3666  * PARAMETERS : @metadata_buf: metadata buffer
3667  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3668  *                 the meta buf in this method
3669  *
3670  * RETURN     :
3671  *
3672  *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)3673 void QCamera3HardwareInterface::handleBatchMetadata(
3674         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3675 {
3676     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
3677 
3678     if (NULL == metadata_buf) {
3679         LOGE("metadata_buf is NULL");
3680         return;
3681     }
3682     /* In batch mode, the metdata will contain the frame number and timestamp of
3683      * the last frame in the batch. Eg: a batch containing buffers from request
3684      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3685      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3686      * multiple process_capture_results */
3687     metadata_buffer_t *metadata =
3688             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3689     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3690     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3691     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3692     uint32_t frame_number = 0, urgent_frame_number = 0;
3693     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3694     bool invalid_metadata = false;
3695     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3696     size_t loopCount = 1;
3697     bool is_metabuf_queued = false;
3698 
3699     int32_t *p_frame_number_valid =
3700             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3701     uint32_t *p_frame_number =
3702             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3703     int64_t *p_capture_time =
3704             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3705     int32_t *p_urgent_frame_number_valid =
3706             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3707     uint32_t *p_urgent_frame_number =
3708             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3709 
3710     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3711             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3712             (NULL == p_urgent_frame_number)) {
3713         LOGE("Invalid metadata");
3714         invalid_metadata = true;
3715     } else {
3716         frame_number_valid = *p_frame_number_valid;
3717         last_frame_number = *p_frame_number;
3718         last_frame_capture_time = *p_capture_time;
3719         urgent_frame_number_valid = *p_urgent_frame_number_valid;
3720         last_urgent_frame_number = *p_urgent_frame_number;
3721     }
3722 
3723     /* In batchmode, when no video buffers are requested, set_parms are sent
3724      * for every capture_request. The difference between consecutive urgent
3725      * frame numbers and frame numbers should be used to interpolate the
3726      * corresponding frame numbers and time stamps */
3727     pthread_mutex_lock(&mMutex);
3728     if (urgent_frame_number_valid) {
3729         ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3730         if(idx < 0) {
3731             LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3732                 last_urgent_frame_number);
3733             mState = ERROR;
3734             pthread_mutex_unlock(&mMutex);
3735             return;
3736         }
3737         first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
3738         urgentFrameNumDiff = last_urgent_frame_number + 1 -
3739                 first_urgent_frame_number;
3740 
3741         LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3742                  urgent_frame_number_valid,
3743                 first_urgent_frame_number, last_urgent_frame_number);
3744     }
3745 
3746     if (frame_number_valid) {
3747         ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3748         if(idx < 0) {
3749             LOGE("Invalid frame number received: %d. Irrecoverable error",
3750                 last_frame_number);
3751             mState = ERROR;
3752             pthread_mutex_unlock(&mMutex);
3753             return;
3754         }
3755         first_frame_number = mPendingBatchMap.valueAt(idx);
3756         frameNumDiff = last_frame_number + 1 -
3757                 first_frame_number;
3758         mPendingBatchMap.removeItem(last_frame_number);
3759 
3760         LOGD("frm: valid: %d frm_num: %d - %d",
3761                  frame_number_valid,
3762                 first_frame_number, last_frame_number);
3763 
3764     }
3765     pthread_mutex_unlock(&mMutex);
3766 
3767     if (urgent_frame_number_valid || frame_number_valid) {
3768         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3769         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3770             LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3771                      urgentFrameNumDiff, last_urgent_frame_number);
3772         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3773             LOGE("frameNumDiff: %d frameNum: %d",
3774                      frameNumDiff, last_frame_number);
3775     }
3776 
3777     for (size_t i = 0; i < loopCount; i++) {
3778         /* handleMetadataWithLock is called even for invalid_metadata for
3779          * pipeline depth calculation */
3780         if (!invalid_metadata) {
3781             /* Infer frame number. Batch metadata contains frame number of the
3782              * last frame */
3783             if (urgent_frame_number_valid) {
3784                 if (i < urgentFrameNumDiff) {
3785                     urgent_frame_number =
3786                             first_urgent_frame_number + i;
3787                     LOGD("inferred urgent frame_number: %d",
3788                              urgent_frame_number);
3789                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3790                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3791                 } else {
3792                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3793                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3794                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3795                 }
3796             }
3797 
3798             /* Infer frame number. Batch metadata contains frame number of the
3799              * last frame */
3800             if (frame_number_valid) {
3801                 if (i < frameNumDiff) {
3802                     frame_number = first_frame_number + i;
3803                     LOGD("inferred frame_number: %d", frame_number);
3804                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3805                             CAM_INTF_META_FRAME_NUMBER, frame_number);
3806                 } else {
3807                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3808                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3809                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3810                 }
3811             }
3812 
3813             if (last_frame_capture_time) {
3814                 //Infer timestamp
3815                 first_frame_capture_time = last_frame_capture_time -
3816                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
3817                 capture_time =
3818                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
3819                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3820                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3821                 LOGD("batch capture_time: %lld, capture_time: %lld",
3822                          last_frame_capture_time, capture_time);
3823             }
3824         }
3825         pthread_mutex_lock(&mMutex);
3826         handleMetadataWithLock(metadata_buf,
3827                 false /* free_and_bufdone_meta_buf */,
3828                 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3829                 (i == frameNumDiff-1), /* last metadata in the batch metadata */
3830                 &is_metabuf_queued /* if metabuf isqueued or not */);
3831         pthread_mutex_unlock(&mMutex);
3832     }
3833 
3834     /* BufDone metadata buffer */
3835     if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
3836         mMetadataChannel->bufDone(metadata_buf);
3837         free(metadata_buf);
3838         metadata_buf = NULL;
3839     }
3840 }
3841 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)3842 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3843         camera3_error_msg_code_t errorCode)
3844 {
3845     camera3_notify_msg_t notify_msg;
3846     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3847     notify_msg.type = CAMERA3_MSG_ERROR;
3848     notify_msg.message.error.error_code = errorCode;
3849     notify_msg.message.error.error_stream = NULL;
3850     notify_msg.message.error.frame_number = frameNumber;
3851     orchestrateNotify(&notify_msg);
3852 
3853     return;
3854 }
3855 
3856 /*===========================================================================
3857  * FUNCTION   : sendPartialMetadataWithLock
3858  *
3859  * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3860  *
3861  * PARAMETERS : @metadata: metadata buffer
3862  *              @requestIter: The iterator for the pending capture request for
 *              which the partial result is being sent
3864  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3865  *                  last urgent metadata in a batch. Always true for non-batch mode
3866  *              @isJumpstartMetadata: Whether this is a partial metadata for
3867  *              jumpstart, i.e. even though it doesn't map to a valid partial
3868  *              frame number, its metadata entries should be kept.
3869  *
3870  * RETURN     :
3871  *
3872  *==========================================================================*/
3873 
sendPartialMetadataWithLock(metadata_buffer_t * metadata,const pendingRequestIterator requestIter,bool lastUrgentMetadataInBatch,bool isJumpstartMetadata)3874 void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3875         metadata_buffer_t *metadata,
3876         const pendingRequestIterator requestIter,
3877         bool lastUrgentMetadataInBatch,
3878         bool isJumpstartMetadata)
3879 {
3880     camera3_capture_result_t result;
3881     memset(&result, 0, sizeof(camera3_capture_result_t));
3882 
3883     requestIter->partial_result_cnt++;
3884 
3885     // Extract 3A metadata
3886     result.result = translateCbUrgentMetadataToResultMetadata(
3887             metadata, lastUrgentMetadataInBatch, requestIter,
3888             isJumpstartMetadata);
3889     // Populate metadata result
3890     result.frame_number = requestIter->frame_number;
3891     result.num_output_buffers = 0;
3892     result.output_buffers = NULL;
3893     result.partial_result = requestIter->partial_result_cnt;
3894 
3895     {
3896         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3897         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3898             // Notify HDR+ client about the partial metadata.
3899             gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3900             result.partial_result == PARTIAL_RESULT_COUNT);
3901         }
3902     }
3903 
3904     orchestrateResult(&result);
3905     LOGD("urgent frame_number = %u", result.frame_number);
3906     free_camera_metadata((camera_metadata_t *)result.result);
3907 }
3908 
3909 /*===========================================================================
3910  * FUNCTION   : handleMetadataWithLock
3911  *
3912  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3913  *
3914  * PARAMETERS : @metadata_buf: metadata buffer
3915  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3916  *                 the meta buf in this method
3917  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3918  *                  last urgent metadata in a batch. Always true for non-batch mode
3919  *              @lastMetadataInBatch: Boolean to indicate whether this is the
3920  *                  last metadata in a batch. Always true for non-batch mode
3921  *              @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3922  *                  buffer is enqueued or not.
3923  *
3924  * RETURN     :
3925  *
3926  *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool lastUrgentMetadataInBatch,bool lastMetadataInBatch,bool * p_is_metabuf_queued)3927 void QCamera3HardwareInterface::handleMetadataWithLock(
3928     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
3929     bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3930     bool *p_is_metabuf_queued)
3931 {
3932     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
3933     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3934         //during flush do not send metadata from this thread
3935         LOGD("not sending metadata during flush or when mState is error");
3936         if (free_and_bufdone_meta_buf) {
3937             mMetadataChannel->bufDone(metadata_buf);
3938             free(metadata_buf);
3939         }
3940         return;
3941     }
3942 
3943     //not in flush
3944     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3945     int32_t frame_number_valid, urgent_frame_number_valid;
3946     uint32_t frame_number, urgent_frame_number;
3947     int64_t capture_time, capture_time_av;
3948     nsecs_t currentSysTime;
3949 
3950     int32_t *p_frame_number_valid =
3951             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3952     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3953     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3954     int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3955     int32_t *p_urgent_frame_number_valid =
3956             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3957     uint32_t *p_urgent_frame_number =
3958             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3959     IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3960             metadata) {
3961         LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3962                  *p_frame_number_valid, *p_frame_number);
3963     }
3964 
3965     camera_metadata_t *resultMetadata = nullptr;
3966 
3967     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3968             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3969         LOGE("Invalid metadata");
3970         if (free_and_bufdone_meta_buf) {
3971             mMetadataChannel->bufDone(metadata_buf);
3972             free(metadata_buf);
3973         }
3974         goto done_metadata;
3975     }
3976     frame_number_valid =        *p_frame_number_valid;
3977     frame_number =              *p_frame_number;
3978     capture_time =              *p_capture_time;
3979     capture_time_av =           *p_capture_time_av;
3980     urgent_frame_number_valid = *p_urgent_frame_number_valid;
3981     urgent_frame_number =       *p_urgent_frame_number;
3982     currentSysTime =            systemTime(CLOCK_MONOTONIC);
3983 
3984     if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3985         const int tries = 3;
3986         nsecs_t bestGap, measured;
3987         for (int i = 0; i < tries; ++i) {
3988             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3989             const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3990             const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3991             const nsecs_t gap = tmono2 - tmono;
3992             if (i == 0 || gap < bestGap) {
3993                 bestGap = gap;
3994                 measured = tbase - ((tmono + tmono2) >> 1);
3995             }
3996         }
3997         capture_time -= measured;
3998     }
3999 
4000     // Detect if buffers from any requests are overdue
4001     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4002         int64_t timeout;
4003         {
4004             Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4005             // If there is a pending HDR+ request, the following requests may be blocked until the
4006             // HDR+ request is done. So allow a longer timeout.
4007             timeout = (mHdrPlusPendingRequests.size() > 0) ?
4008                     MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
4009             timeout = s2ns(timeout);
4010             if (timeout < mExpectedInflightDuration) {
4011                 timeout = mExpectedInflightDuration;
4012             }
4013         }
4014 
4015         if ((currentSysTime - req.timestamp) > timeout) {
4016             for (auto &missed : req.mPendingBufferList) {
4017                 assert(missed.stream->priv);
4018                 if (missed.stream->priv) {
4019                     QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
4020                     assert(ch->mStreams[0]);
4021                     if (ch->mStreams[0]) {
4022                         LOGE("Cancel missing frame = %d, buffer = %p,"
4023                             "stream type = %d, stream format = %d",
4024                             req.frame_number, missed.buffer,
4025                             ch->mStreams[0]->getMyType(), missed.stream->format);
4026                         ch->timeoutFrame(req.frame_number);
4027                     }
4028                 }
4029             }
4030         }
4031     }
4032     //For the very first metadata callback, regardless whether it contains valid
4033     //frame number, send the partial metadata for the jumpstarting requests.
4034     //Note that this has to be done even if the metadata doesn't contain valid
4035     //urgent frame number, because in the case only 1 request is ever submitted
4036     //to HAL, there won't be subsequent valid urgent frame number.
4037     if (mFirstMetadataCallback) {
4038         for (pendingRequestIterator i =
4039                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
4040             if (i->bUseFirstPartial) {
4041                 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4042                         true /*isJumpstartMetadata*/);
4043             }
4044         }
4045         mFirstMetadataCallback = false;
4046     }
4047 
4048     //Partial result on process_capture_result for timestamp
4049     if (urgent_frame_number_valid) {
4050         LOGD("valid urgent frame_number = %u", urgent_frame_number);
4051 
4052         //Recieved an urgent Frame Number, handle it
4053         //using partial results
4054         for (pendingRequestIterator i =
4055                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
4056             LOGD("Iterator Frame = %d urgent frame = %d",
4057                  i->frame_number, urgent_frame_number);
4058 
4059             if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
4060                     (i->partial_result_cnt == 0)) {
4061                 LOGE("Error: HAL missed urgent metadata for frame number %d",
4062                          i->frame_number);
4063                 i->partialResultDropped = true;
4064                 i->partial_result_cnt++;
4065             }
4066 
4067             if (i->frame_number == urgent_frame_number &&
4068                      i->partial_result_cnt == 0) {
4069                 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4070                         false /*isJumpstartMetadata*/);
4071                 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
4072                     // Instant AEC settled for this frame.
4073                     LOGH("instant AEC settled for frame number %d", urgent_frame_number);
4074                     mInstantAECSettledFrameNumber = urgent_frame_number;
4075                 }
4076                 break;
4077             }
4078         }
4079     }
4080 
4081     if (!frame_number_valid) {
4082         LOGD("Not a valid normal frame number, used as SOF only");
4083         if (free_and_bufdone_meta_buf) {
4084             mMetadataChannel->bufDone(metadata_buf);
4085             free(metadata_buf);
4086         }
4087         goto done_metadata;
4088     }
4089     LOGH("valid frame_number = %u, capture_time = %lld",
4090             frame_number, capture_time);
4091 
4092     handleDepthDataLocked(metadata->depth_data, frame_number,
4093             metadata->is_depth_data_valid);
4094 
4095     // Check whether any stream buffer corresponding to this is dropped or not
4096     // If dropped, then send the ERROR_BUFFER for the corresponding stream
4097     // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
4098     for (auto & pendingRequest : mPendingRequestsList) {
4099         if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
4100                     mInstantAECSettledFrameNumber)) {
4101             camera3_notify_msg_t notify_msg = {};
4102             for (auto & buffer : pendingRequest.buffers) {
4103                 bool dropFrame = false;
4104                 QCamera3ProcessingChannel *channel =
4105                         (QCamera3ProcessingChannel *)buffer.stream->priv;
4106                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4107                 if (p_cam_frame_drop) {
4108                     for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
4109                         if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
4110                             // Got the stream ID for drop frame.
4111                             dropFrame = true;
4112                             break;
4113                         }
4114                     }
4115                 } else {
4116                     // This is instant AEC case.
4117                     // For instant AEC drop the stream until AEC is settled.
4118                     dropFrame = true;
4119                 }
4120 
4121                 if (dropFrame) {
4122                     // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
4123                     if (p_cam_frame_drop) {
4124                         // Treat msg as error for system buffer drops
4125                         LOGE("Start of reporting error frame#=%u, streamID=%u",
4126                                  pendingRequest.frame_number, streamID);
4127                     } else {
4128                         // For instant AEC, inform frame drop and frame number
4129                         LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
4130                                 "AEC settled frame number = %u",
4131                                 pendingRequest.frame_number, streamID,
4132                                 mInstantAECSettledFrameNumber);
4133                     }
4134                     notify_msg.type = CAMERA3_MSG_ERROR;
4135                     notify_msg.message.error.frame_number = pendingRequest.frame_number;
4136                     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
4137                     notify_msg.message.error.error_stream = buffer.stream;
4138                     orchestrateNotify(&notify_msg);
4139                     if (p_cam_frame_drop) {
4140                         // Treat msg as error for system buffer drops
4141                         LOGE("End of reporting error frame#=%u, streamID=%u",
4142                                 pendingRequest.frame_number, streamID);
4143                     } else {
4144                         // For instant AEC, inform frame drop and frame number
4145                         LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
4146                                 "AEC settled frame number = %u",
4147                                 pendingRequest.frame_number, streamID,
4148                                 mInstantAECSettledFrameNumber);
4149                     }
4150                     PendingFrameDropInfo PendingFrameDrop;
4151                     PendingFrameDrop.frame_number = pendingRequest.frame_number;
4152                     PendingFrameDrop.stream_ID = streamID;
4153                     // Add the Frame drop info to mPendingFrameDropList
4154                     mPendingFrameDropList.push_back(PendingFrameDrop);
4155                 }
4156             }
4157         }
4158     }
4159 
4160     for (auto & pendingRequest : mPendingRequestsList) {
4161         // Find the pending request with the frame number.
4162         if (pendingRequest.frame_number < frame_number) {
4163             // Workaround for case where shutter is missing due to dropped
4164             // metadata
4165             if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
4166                 mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
4167             }
4168         } else if (pendingRequest.frame_number == frame_number) {
4169             // Update the sensor timestamp.
4170             pendingRequest.timestamp = capture_time;
4171 
4172 
4173             /* Set the timestamp in display metadata so that clients aware of
4174                private_handle such as VT can use this un-modified timestamps.
4175                Camera framework is unaware of this timestamp and cannot change this */
4176             updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
4177 
4178             // Find channel requiring metadata, meaning internal offline postprocess
4179             // is needed.
4180             //TODO: for now, we don't support two streams requiring metadata at the same time.
4181             // (because we are not making copies, and metadata buffer is not reference counted.
4182             bool internalPproc = false;
4183             for (pendingBufferIterator iter = pendingRequest.buffers.begin();
4184                     iter != pendingRequest.buffers.end(); iter++) {
4185                 if (iter->need_metadata) {
4186                     internalPproc = true;
4187                     QCamera3ProcessingChannel *channel =
4188                             (QCamera3ProcessingChannel *)iter->stream->priv;
4189 
4190                     if (iter->need_crop) {
4191                         QCamera3Stream *stream = channel->getStreamByIndex(0);
4192 
4193                         // Map the EIS crop to respective stream crop and append it.
4194                         IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA,
4195                                 metadata) {
4196                             for (int j = 0; j < crop_data->num_of_streams; j++) {
4197                                 if ((stream != nullptr) &&
4198                                         (stream->getMyServerID() ==
4199                                          crop_data->crop_info[j].stream_id)) {
4200 
4201                                     cam_dimension_t streamDim;
4202                                     if (stream->getFrameDimension(streamDim) != NO_ERROR) {
4203                                         LOGE("%s: Failed obtaining stream dimensions!", __func__);
4204                                         continue;
4205                                     }
4206 
4207                                     mStreamCropMapper.update(
4208                                             gCamCapability[mCameraId]->active_array_size.width,
4209                                             gCamCapability[mCameraId]->active_array_size.height,
4210                                             streamDim.width, streamDim.height);
4211 
4212                                     cam_eis_crop_info_t eisCrop = iter->crop_info;
4213                                     //eisCrop already combines zoom_ratio, no
4214                                     //need to apply it again.
4215                                     mStreamCropMapper.toSensor(eisCrop.delta_x, eisCrop.delta_y,
4216                                             eisCrop.delta_width, eisCrop.delta_height, 1.0f);
4217 
4218                                     int32_t crop[4] = {
4219                                         crop_data->crop_info[j].crop.left   + eisCrop.delta_x,
4220                                         crop_data->crop_info[j].crop.top    + eisCrop.delta_y,
4221                                         crop_data->crop_info[j].crop.width  - eisCrop.delta_width,
4222                                         crop_data->crop_info[j].crop.height - eisCrop.delta_height
4223                                     };
4224 
4225                                     if (isCropValid(crop[0], crop[1], crop[2], crop[3],
4226                                                 streamDim.width, streamDim.height)) {
4227                                         crop_data->crop_info[j].crop.left   = crop[0];
4228                                         crop_data->crop_info[j].crop.top    = crop[1];
4229                                         crop_data->crop_info[j].crop.width  = crop[2];
4230                                         crop_data->crop_info[j].crop.height = crop[3];
4231                                     } else {
4232                                         LOGE("Invalid EIS compensated crop region");
4233                                     }
4234 
4235                                     break;
4236                                 }
4237                             }
4238                         }
4239                     }
4240 
4241                     channel->queueReprocMetadata(metadata_buf);
4242                     if(p_is_metabuf_queued != NULL) {
4243                         *p_is_metabuf_queued = true;
4244                     }
4245                     iter->need_metadata = false;
4246                     break;
4247                 }
4248             }
4249             for (auto itr = pendingRequest.internalRequestList.begin();
4250                   itr != pendingRequest.internalRequestList.end(); itr++) {
4251                 if (itr->need_metadata) {
4252                     internalPproc = true;
4253                     QCamera3ProcessingChannel *channel =
4254                             (QCamera3ProcessingChannel *)itr->stream->priv;
4255                     channel->queueReprocMetadata(metadata_buf);
4256                     break;
4257                 }
4258             }
4259 
4260             saveExifParams(metadata);
4261 
4262             bool *enableZsl = nullptr;
4263             if (gExposeEnableZslKey) {
4264                 enableZsl = &pendingRequest.enableZsl;
4265             }
4266 
4267             resultMetadata = translateFromHalMetadata(metadata,
4268                     pendingRequest, internalPproc,
4269                     lastMetadataInBatch, enableZsl);
4270 
4271             updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
4272 
4273             if (pendingRequest.blob_request) {
4274                 //Dump tuning metadata if enabled and available
4275                 char prop[PROPERTY_VALUE_MAX];
4276                 memset(prop, 0, sizeof(prop));
4277                 property_get("persist.camera.dumpmetadata", prop, "0");
4278                 int32_t enabled = atoi(prop);
4279                 if (enabled && metadata->is_tuning_params_valid) {
4280                     dumpMetadataToFile(metadata->tuning_params,
4281                            mMetaFrameCount,
4282                            enabled,
4283                            "Snapshot",
4284                            frame_number);
4285                 }
4286             }
4287 
4288             if (!internalPproc) {
4289                 LOGD("couldn't find need_metadata for this metadata");
4290                 // Return metadata buffer
4291                 if (free_and_bufdone_meta_buf) {
4292                     mMetadataChannel->bufDone(metadata_buf);
4293                     free(metadata_buf);
4294                 }
4295             }
4296 
4297             break;
4298         }
4299     }
4300 
4301     mShutterDispatcher.markShutterReady(frame_number, capture_time);
4302 
4303     // Try to send out capture result metadata.
4304     handlePendingResultMetadataWithLock(frame_number,  resultMetadata);
4305     return;
4306 
4307 done_metadata:
4308     for (pendingRequestIterator i = mPendingRequestsList.begin();
4309             i != mPendingRequestsList.end() ;i++) {
4310         i->pipeline_depth++;
4311     }
4312     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4313     unblockRequestIfNecessary();
4314 }
4315 
4316 /*===========================================================================
4317  * FUNCTION   : handleDepthDataLocked
4318  *
4319  * DESCRIPTION: Handles incoming depth data
4320  *
4321  * PARAMETERS : @depthData  : Depth data
4322  *              @frameNumber: Frame number of the incoming depth data
4323  *              @valid      : Valid flag for the incoming data
4324  *
4325  * RETURN     :
4326  *
4327  *==========================================================================*/
void QCamera3HardwareInterface::handleDepthDataLocked(
        const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
    uint32_t currentFrameNumber;
    buffer_handle_t *depthBuffer;

    // Nothing to do if no depth channel is configured.
    if (nullptr == mDepthChannel) {
        return;
    }

    camera3_stream_buffer_t resultBuffer =
        {.stream = mDepthChannel->getStream(),
         .buffer = nullptr,
         .status = CAMERA3_BUFFER_STATUS_OK,
         .acquire_fence = -1,
         .release_fence = -1,
        };
    // Drain the depth channel's queued buffers oldest-first, returning each
    // one up to (and including) the buffer for 'frameNumber'.
    // Note: 'currentFrameNumber' is an out-parameter of getOldestFrame(); it
    // is only read after a successful (non-null) call.
    do {
        depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
        if (nullptr == depthBuffer) {
            // No more pending depth buffers.
            break;
        }

        resultBuffer.buffer = depthBuffer;
        if (currentFrameNumber == frameNumber) {
            // This buffer matches the incoming depth data: populate it, or
            // flag an error if the data is invalid or the copy fails.
            if (valid) {
                int32_t rc = mDepthChannel->populateDepthData(depthData,
                        frameNumber);
                if (NO_ERROR != rc) {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                } else {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
                }
            } else {
                resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            }
        } else if (currentFrameNumber > frameNumber) {
            // Oldest queued buffer is newer than the incoming data; leave it
            // queued for a later callback.
            break;
        } else {
            // An older buffer whose depth data never arrived: notify an
            // ERROR_BUFFER and return the buffer so the framework doesn't
            // wait for it forever.
            camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
                    {{currentFrameNumber, mDepthChannel->getStream(),
                            CAMERA3_MSG_ERROR_BUFFER}}};
            orchestrateNotify(&notify_msg);

            LOGE("Depth buffer for frame number: %d is missing "
                    "returning back!", currentFrameNumber);
            resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
        }
        // Unmap and hand the buffer to the dispatcher regardless of status.
        mDepthChannel->unmapBuffer(currentFrameNumber);
        mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
    } while (currentFrameNumber < frameNumber);
}
4379 
4380 /*===========================================================================
4381  * FUNCTION   : notifyErrorFoPendingDepthData
4382  *
4383  * DESCRIPTION: Returns error for any pending depth buffers
4384  *
4385  * PARAMETERS : depthCh - depth channel that needs to get flushed
4386  *
4387  * RETURN     :
4388  *
4389  *==========================================================================*/
notifyErrorFoPendingDepthData(QCamera3DepthChannel * depthCh)4390 void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4391         QCamera3DepthChannel *depthCh) {
4392     uint32_t currentFrameNumber;
4393     buffer_handle_t *depthBuffer;
4394 
4395     if (nullptr == depthCh) {
4396         return;
4397     }
4398 
4399     camera3_notify_msg_t notify_msg =
4400         {.type = CAMERA3_MSG_ERROR,
4401                 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4402     camera3_stream_buffer_t resultBuffer =
4403         {.stream = depthCh->getStream(),
4404          .buffer = nullptr,
4405          .status = CAMERA3_BUFFER_STATUS_ERROR,
4406          .acquire_fence = -1,
4407          .release_fence = -1,};
4408 
4409     while (nullptr !=
4410             (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4411         depthCh->unmapBuffer(currentFrameNumber);
4412 
4413         notify_msg.message.error.frame_number = currentFrameNumber;
4414         orchestrateNotify(&notify_msg);
4415 
4416         mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
4417     };
4418 }
4419 
4420 /*===========================================================================
4421  * FUNCTION   : hdrPlusPerfLock
4422  *
4423  * DESCRIPTION: perf lock for HDR+ using custom intent
4424  *
4425  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4426  *
4427  * RETURN     : None
4428  *
4429  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)4430 void QCamera3HardwareInterface::hdrPlusPerfLock(
4431         mm_camera_super_buf_t *metadata_buf)
4432 {
4433     if (NULL == metadata_buf) {
4434         LOGE("metadata_buf is NULL");
4435         return;
4436     }
4437     metadata_buffer_t *metadata =
4438             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4439     int32_t *p_frame_number_valid =
4440             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4441     uint32_t *p_frame_number =
4442             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4443 
4444     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4445         LOGE("%s: Invalid metadata", __func__);
4446         return;
4447     }
4448 
4449     //acquire perf lock for 2 secs after the last HDR frame is captured
4450     constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
4451     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4452         if ((p_frame_number != NULL) &&
4453                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4454             mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4455         }
4456     }
4457 }
4458 
4459 /*===========================================================================
4460  * FUNCTION   : handleInputBufferWithLock
4461  *
4462  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4463  *
4464  * PARAMETERS : @frame_number: frame number of the input buffer
4465  *
4466  * RETURN     :
4467  *
4468  *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending reprocess request for this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // A reprocess result reuses the request settings' sensor timestamp
        // for the shutter; fall back to the current time if it is absent.
        CameraMetadata settings;
        nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
        if(i->settings) {
            settings = i->settings;
            if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
            } else {
                LOGE("No timestamp in input settings! Using current one.");
            }
        } else {
            LOGE("Input settings missing!");
        }

        mShutterDispatcher.markShutterReady(frame_number, capture_time);
        LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                    i->frame_number, capture_time);

        // For reprocess, the capture result echoes the request settings and
        // returns the input buffer; partial results are not used.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Remove the completed request before dispatching, so the dispatch
        // loop below doesn't see it as still pending.
        i = erasePendingRequest(i);

        // Dispatch result metadata that may be just unblocked by this reprocess result.
        dispatchResultMetadataWithLock(frame_number, REPROCESS, false/*isHdrPlus*/);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
4513 
4514 /*===========================================================================
4515  * FUNCTION   : handleBufferWithLock
4516  *
4517  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4518  *
4519  * PARAMETERS : @buffer: image buffer for the callback
4520  *              @frame_number: frame number of the image buffer
4521  *
4522  * RETURN     :
4523  *
4524  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A completed BLOB (JPEG) buffer marks the end of a snapshot; release
    // the perf lock taken for the capture.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // Flush in progress: buffers follow the dedicated flush path.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }

    if (i != mPendingRequestsList.end()) {
        if (i->input_buffer) {
            // For a reprocessing request, try to send out result metadata.
            handlePendingResultMetadataWithLock(frame_number, nullptr);
        }
    }

    // Check if this frame was dropped.
    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
            m != mPendingFrameDropList.end(); m++) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
        if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
            // This (frame, stream) pair was recorded as dropped earlier;
            // mark the buffer in error and remove the book-keeping entry.
            buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
            LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                     frame_number, streamID);
            m = mPendingFrameDropList.erase(m);
            break;
        }
    }

    // WAR for encoder avtimer timestamp issue
    // For video streams when the AV timer is enabled, stamp the buffer's
    // private handle with the AV timestamp recorded for this request, or
    // flag an error if none was recorded.
    QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
    if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
        m_bAVTimerEnabled) {
        for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
            if (req->frame_number != frame_number)
                continue;
            if(req->av_timestamp == 0) {
                buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
            }
            else {
                struct private_handle_t *priv_handle =
                    (struct private_handle_t *) (*(buffer->buffer));
                setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
            }
        }
    }

    // Merge in any error status already recorded against this buffer handle.
    buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
    LOGH("result frame_number = %d, buffer = %p",
             frame_number, buffer->buffer);

    // An errored buffer must be accompanied by an ERROR_BUFFER notify.
    if (buffer->status == CAMERA3_BUFFER_STATUS_ERROR) {
        camera3_notify_msg_t notify_msg = {};
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.frame_number = frame_number;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
        notify_msg.message.error.error_stream = buffer->stream;
        orchestrateNotify(&notify_msg);
    }

    mPendingBuffersMap.removeBuf(buffer->buffer);
    mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);

    // First preview buffer after start: release the startup perf locks and
    // set the steady-state power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");

            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
4621 
removeUnrequestedMetadata(pendingRequestIterator requestIter,camera_metadata_t * resultMetadata)4622 void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4623         camera_metadata_t *resultMetadata) {
4624     CameraMetadata metadata;
4625     metadata.acquire(resultMetadata);
4626 
4627     // Remove len shading map if it's not requested.
4628     if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4629             metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4630             metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
4631             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4632         metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4633         metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4634             &requestIter->requestedLensShadingMapMode, 1);
4635     }
4636 
4637     // Remove face information if it's not requested.
4638     if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4639             metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4640             metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4641             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4642         metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4643         metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4644                 &requestIter->requestedFaceDetectMode, 1);
4645     }
4646 
4647     requestIter->resultMetadata = metadata.release();
4648 }
4649 
void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
        camera_metadata_t *resultMetadata)
{
    // Attaches result metadata to the pending request for 'frameNumber',
    // finalizes its partial-result count based on request type, and then
    // attempts to dispatch any result metadata that is now ready.
    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    RequestType requestType = (requestIter->input_buffer != nullptr) ?  REPROCESS :
            (isStillZsl(*requestIter) ? ZSL : NORMAL);
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestType == REPROCESS) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        // Live request: the urgent (partial) metadata should have arrived
        // before the final metadata; if not, record the drop.
        if ((requestIter->partial_result_cnt == 0) && !requestIter->partialResultDropped) {
            LOGE("Urgent metadata for frame number: %d didn't arrive!", frameNumber);
            requestIter->partialResultDropped = true;
        }
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
        mPendingLiveRequest--;

        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            // For a live request, send the metadata to HDR+ client.
            // NOTE(review): 'resultMetadata' is dereferenced here — this path
            // appears to assume live requests always carry non-null metadata;
            // confirm against callers.
            if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                    requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
            }
        }
    }

    // Reprocess results echo the request settings verbatim, so only
    // non-reprocess results are filtered for unrequested entries.
    if (requestType != REPROCESS) {
        removeUnrequestedMetadata(requestIter, resultMetadata);
    }

    dispatchResultMetadataWithLock(frameNumber, requestType, requestIter->hdrplus);
}
4702 
/*===========================================================================
 * FUNCTION   : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks the pending request list (ordered by increasing frame
 *              number) and sends out, in order, every result metadata of the
 *              given request type that is ready. For a live request, also
 *              notifies ERROR_RESULT for earlier live requests whose metadata
 *              never arrived. Expected to run with mMutex held (per the
 *              "WithLock" suffix) — confirm at call sites.
 *
 * PARAMETERS :
 *   @frameNumber : Frame number whose result metadata just became available.
 *   @requestType : NORMAL/ZSL/REPROCESS classification of that request; only
 *                  pending requests of the same type are dispatched here.
 *   @isHdrPlus   : Whether the completed request is an HDR+ request.
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
        RequestType requestType, bool isHdrPlus) {
    // The pending requests are ordered by increasing frame numbers. The result metadata are ready
    // to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out result metadata that are ready. Also if
    // this result metadata belongs to a live request, notify errors for previous live requests
    // that don't have result metadata yet.
    // Note: a live request is either a NORMAL request, or a ZSL non-hdrplus request.
    bool isLiveRequest = requestType != REPROCESS && !isHdrPlus;
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Classify this pending entry with the same rule the caller uses:
        // input buffer => REPROCESS, otherwise still-ZSL => ZSL, else NORMAL.
        bool thisIsStillZsl = isStillZsl(*iter);
        RequestType thisRequestType = (iter->input_buffer != nullptr) ? REPROCESS :
                (thisIsStillZsl ? ZSL : NORMAL);
        // Only dispatch entries of the same type as the request that just
        // completed; other types are dispatched by their own completion paths.
        if (thisRequestType != requestType) {
            iter++;
            continue;
        }
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        readyToSend &= iter->resultMetadata != nullptr;

        bool thisLiveRequest = !iter->hdrplus && iter->input_buffer == nullptr;
        bool errorResult = false;

        // Build the capture result to be delivered for this entry.
        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send it out.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }
            // Notify ERROR_RESULT if partial result was dropped.
            errorResult = iter->partialResultDropped;
        } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
            errorResult = true;
        } else {
            // Metadata not here yet and no error condition applies; skip and
            // keep the entry pending.
            iter++;
            continue;
        }

        if (errorResult) {
            // Check for any buffers that might be stuck in the post-process input queue
            // awaiting metadata and queue an empty meta buffer. The invalid data should
            // fail the offline post-process pass and return any buffers that otherwise
            // will become lost.
            for (auto it = iter->buffers.begin(); it != iter->buffers.end(); it++) {
                if (it->need_metadata) {
                    QCamera3ProcessingChannel *channel =
                        reinterpret_cast<QCamera3ProcessingChannel *> (it->stream->priv);
                    if (channel != nullptr) {
                        LOGE("Dropped result: %d Unblocking any pending pp buffers!",
                                iter->frame_number);
                        channel->queueReprocMetadata(nullptr);
                    }
                    it->need_metadata = false;
                    break;
                }
            }

            // Tell the framework the result for this frame is lost.
            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            // Metadata-only result; buffers are delivered separately.
            result.output_buffers = nullptr;
            result.num_output_buffers = 0;
            orchestrateResult(&result);
        }
        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        // erasePendingRequest returns the iterator to the next entry.
        iter = erasePendingRequest(iter);
    }

    if (isLiveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    // A slot may have opened up; wake any blocked processCaptureRequest caller.
    unblockRequestIfNecessary();
}
4803 
4804 /*===========================================================================
4805  * FUNCTION   : unblockRequestIfNecessary
4806  *
4807  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4808  *              that mMutex is held when this function is called.
4809  *
4810  * PARAMETERS :
4811  *
4812  * RETURN     :
4813  *
4814  *==========================================================================*/
unblockRequestIfNecessary()4815 void QCamera3HardwareInterface::unblockRequestIfNecessary()
4816 {
4817    // Unblock process_capture_request
4818    pthread_cond_signal(&mRequestCond);
4819 }
4820 
4821 /*===========================================================================
4822  * FUNCTION   : isHdrSnapshotRequest
4823  *
4824  * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4825  *
4826  * PARAMETERS : camera3 request structure
4827  *
4828  * RETURN     : boolean decision variable
4829  *
4830  *==========================================================================*/
isHdrSnapshotRequest(camera3_capture_request * request)4831 bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4832 {
4833     if (request == NULL) {
4834         LOGE("Invalid request handle");
4835         assert(0);
4836         return false;
4837     }
4838 
4839     if (!mForceHdrSnapshot) {
4840         CameraMetadata frame_settings;
4841         frame_settings = request->settings;
4842 
4843         if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4844             uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4845             if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4846                 return false;
4847             }
4848         } else {
4849             return false;
4850         }
4851 
4852         if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4853             uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4854             if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4855                 return false;
4856             }
4857         } else {
4858             return false;
4859         }
4860     }
4861 
4862     for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4863         if (request->output_buffers[i].stream->format
4864                 == HAL_PIXEL_FORMAT_BLOB) {
4865             return true;
4866         }
4867     }
4868 
4869     return false;
4870 }
4871 /*===========================================================================
4872  * FUNCTION   : orchestrateRequest
4873  *
4874  * DESCRIPTION: Orchestrates a capture request from camera service
4875  *
4876  * PARAMETERS :
4877  *   @request : request from framework to process
4878  *
4879  * RETURN     : Error status codes
4880  *
4881  *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Remember the framework's view of the request so it can be restored after
    // any internally generated (bracketed) captures are submitted.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // sequence of internal captures at different exposure compensations.
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                // Settling frames are metering-only; no reprocess metadata is
                // needed for them.
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Internal-only frame number: results for it are dropped when mapped
        // back to the framework (EMPTY_FRAMEWORK_FRAME_NUMBER).
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // This submission carries the framework's output buffers; its internal
        // number is mapped to the original framework frame number so results
        // are reported upstream.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // NOTE(review): modified_settings released from modified_meta is
        // reassigned below but never explicitly freed here — confirm that
        // CameraMetadata assignment takes or clones ownership, otherwise the
        // released buffers leak.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            // Settling frame: metering only, no metadata needed.
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            // Actual 0X capture: request the BLOB output with metadata.
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            // Settling frame at the 2X compensation: metering only.
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            // Actual 2X capture with metadata.
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Non-HDR (or reprocess) path: translate the framework frame number to
        // a fresh internal one and submit the request unchanged.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
5017 
5018 /*===========================================================================
5019  * FUNCTION   : orchestrateResult
5020  *
5021  * DESCRIPTION: Orchestrates a capture result to camera service
5022  *
5023  * PARAMETERS :
5024  *   @request : request from framework to process
5025  *
5026  * RETURN     :
5027  *
5028  *==========================================================================*/
orchestrateResult(camera3_capture_result_t * result)5029 void QCamera3HardwareInterface::orchestrateResult(
5030                     camera3_capture_result_t *result)
5031 {
5032     uint32_t frameworkFrameNumber;
5033     int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
5034             frameworkFrameNumber);
5035     if (rc != NO_ERROR) {
5036         LOGE("Cannot find translated frameworkFrameNumber");
5037         assert(0);
5038     } else {
5039         if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
5040             LOGD("Internal Request drop the result");
5041         } else {
5042             if (result->result != NULL) {
5043                 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
5044                 camera_metadata_entry_t entry;
5045                 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
5046                 if (ret == OK) {
5047                     int64_t sync_frame_number = frameworkFrameNumber;
5048                     ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
5049                     if (ret != OK)
5050                         LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
5051                 }
5052             }
5053             result->frame_number = frameworkFrameNumber;
5054             LOGH("process_capture_result frame_number %d, result %p, partial %d", result->frame_number, result->result, result->partial_result);
5055             mCallbackOps->process_capture_result(mCallbackOps, result);
5056         }
5057     }
5058 }
5059 
5060 /*===========================================================================
5061  * FUNCTION   : orchestrateNotify
5062  *
5063  * DESCRIPTION: Orchestrates a notify to camera service
5064  *
5065  * PARAMETERS :
5066  *   @request : request from framework to process
5067  *
5068  * RETURN     :
5069  *
5070  *==========================================================================*/
orchestrateNotify(camera3_notify_msg_t * notify_msg)5071 void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
5072 {
5073     uint32_t frameworkFrameNumber;
5074     uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
5075     int32_t rc = NO_ERROR;
5076 
5077     rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
5078                                                           frameworkFrameNumber);
5079 
5080     if (rc != NO_ERROR) {
5081         if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
5082             LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
5083             frameworkFrameNumber = 0;
5084         } else {
5085             LOGE("Cannot find translated frameworkFrameNumber");
5086             assert(0);
5087             return;
5088         }
5089     }
5090 
5091     if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
5092         LOGD("Internal Request drop the notifyCb");
5093     } else {
5094         notify_msg->message.shutter.frame_number = frameworkFrameNumber;
5095         mCallbackOps->notify(mCallbackOps, notify_msg);
5096     }
5097 }
5098 
5099 /*===========================================================================
5100  * FUNCTION   : FrameNumberRegistry
5101  *
5102  * DESCRIPTION: Constructor
5103  *
5104  * PARAMETERS :
5105  *
5106  * RETURN     :
5107  *
5108  *==========================================================================*/
FrameNumberRegistry()5109 FrameNumberRegistry::FrameNumberRegistry()
5110 {
5111     _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
5112 }
5113 
5114 /*===========================================================================
5115  * FUNCTION   : ~FrameNumberRegistry
5116  *
5117  * DESCRIPTION: Destructor
5118  *
5119  * PARAMETERS :
5120  *
5121  * RETURN     :
5122  *
5123  *==========================================================================*/
~FrameNumberRegistry()5124 FrameNumberRegistry::~FrameNumberRegistry()
5125 {
5126 }
5127 
5128 /*===========================================================================
5129  * FUNCTION   : PurgeOldEntriesLocked
5130  *
5131  * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
5132  *
5133  * PARAMETERS :
5134  *
5135  * RETURN     : NONE
5136  *
5137  *==========================================================================*/
purgeOldEntriesLocked()5138 void FrameNumberRegistry::purgeOldEntriesLocked()
5139 {
5140     while (_register.begin() != _register.end()) {
5141         auto itr = _register.begin();
5142         if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
5143             _register.erase(itr);
5144         } else {
5145             return;
5146         }
5147     }
5148 }
5149 
5150 /*===========================================================================
5151  * FUNCTION   : allocStoreInternalFrameNumber
5152  *
5153  * DESCRIPTION: Method to note down a framework request and associate a new
5154  *              internal request number against it
5155  *
5156  * PARAMETERS :
5157  *   @fFrameNumber: Identifier given by framework
5158  *   @internalFN  : Output parameter which will have the newly generated internal
5159  *                  entry
5160  *
5161  * RETURN     : Error code
5162  *
5163  *==========================================================================*/
allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,uint32_t & internalFrameNumber)5164 int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
5165                                                             uint32_t &internalFrameNumber)
5166 {
5167     Mutex::Autolock lock(mRegistryLock);
5168     internalFrameNumber = _nextFreeInternalNumber++;
5169     LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
5170     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
5171     purgeOldEntriesLocked();
5172     return NO_ERROR;
5173 }
5174 
5175 /*===========================================================================
5176  * FUNCTION   : generateStoreInternalFrameNumber
5177  *
5178  * DESCRIPTION: Method to associate a new internal request number independent
5179  *              of any associate with framework requests
5180  *
5181  * PARAMETERS :
5182  *   @internalFrame#: Output parameter which will have the newly generated internal
5183  *
5184  *
5185  * RETURN     : Error code
5186  *
5187  *==========================================================================*/
generateStoreInternalFrameNumber(uint32_t & internalFrameNumber)5188 int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
5189 {
5190     Mutex::Autolock lock(mRegistryLock);
5191     internalFrameNumber = _nextFreeInternalNumber++;
5192     LOGD("Generated internal framenumber:%d", internalFrameNumber);
5193     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
5194     purgeOldEntriesLocked();
5195     return NO_ERROR;
5196 }
5197 
5198 /*===========================================================================
5199  * FUNCTION   : getFrameworkFrameNumber
5200  *
5201  * DESCRIPTION: Method to query the framework framenumber given an internal #
5202  *
5203  * PARAMETERS :
5204  *   @internalFrame#: Internal reference
5205  *   @frameworkframenumber: Output parameter holding framework frame entry
5206  *
5207  * RETURN     : Error code
5208  *
5209  *==========================================================================*/
getFrameworkFrameNumber(uint32_t internalFrameNumber,uint32_t & frameworkFrameNumber)5210 int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
5211                                                      uint32_t &frameworkFrameNumber)
5212 {
5213     Mutex::Autolock lock(mRegistryLock);
5214     auto itr = _register.find(internalFrameNumber);
5215     if (itr == _register.end()) {
5216         LOGE("Cannot find internal#: %d", internalFrameNumber);
5217         return -ENOENT;
5218     }
5219 
5220     frameworkFrameNumber = itr->second;
5221     purgeOldEntriesLocked();
5222     return NO_ERROR;
5223 }
5224 
fillPbStreamConfig(pbcamera::StreamConfiguration * config,uint32_t pbStreamId,QCamera3Channel * channel,uint32_t streamIndex)5225 status_t QCamera3HardwareInterface::fillPbStreamConfig(
5226         pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
5227         uint32_t streamIndex) {
5228     if (config == nullptr) {
5229         LOGE("%s: config is null", __FUNCTION__);
5230         return BAD_VALUE;
5231     }
5232 
5233     if (channel == nullptr) {
5234         LOGE("%s: channel is null", __FUNCTION__);
5235         return BAD_VALUE;
5236     }
5237 
5238     QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
5239     if (stream == nullptr) {
5240         LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
5241         return NAME_NOT_FOUND;
5242     }
5243 
5244     const cam_stream_info_t* streamInfo = stream->getStreamInfo();
5245     if (streamInfo == nullptr) {
5246         LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
5247         return NAME_NOT_FOUND;
5248     }
5249 
5250     config->id = pbStreamId;
5251     config->image.width = streamInfo->dim.width;
5252     config->image.height = streamInfo->dim.height;
5253     config->image.padding = 0;
5254 
5255     int bytesPerPixel = 0;
5256 
5257     switch (streamInfo->fmt) {
5258         case CAM_FORMAT_YUV_420_NV21:
5259             config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5260             bytesPerPixel = 1;
5261             break;
5262         case CAM_FORMAT_YUV_420_NV12:
5263         case CAM_FORMAT_YUV_420_NV12_VENUS:
5264             config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5265             bytesPerPixel = 1;
5266             break;
5267         default:
5268             ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
5269             return BAD_VALUE;
5270     }
5271 
5272     uint32_t totalPlaneSize = 0;
5273 
5274     // Fill plane information.
5275     for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
5276         pbcamera::PlaneConfiguration plane;
5277         plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
5278         plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
5279         config->image.planes.push_back(plane);
5280 
5281         totalPlaneSize += (plane.stride * plane.scanline);
5282     }
5283 
5284     config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
5285     return OK;
5286 }
5287 
5288 /*===========================================================================
5289  * FUNCTION   : processCaptureRequest
5290  *
5291  * DESCRIPTION: process a capture request from camera service
5292  *
5293  * PARAMETERS :
5294  *   @request : request from framework to process
5295  *
5296  * RETURN     :
5297  *
5298  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)5299 int QCamera3HardwareInterface::processCaptureRequest(
5300                     camera3_capture_request_t *request,
5301                     List<InternalRequest> &internallyRequestedStreams)
5302 {
5303     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
5304     int rc = NO_ERROR;
5305     int32_t request_id;
5306     CameraMetadata meta;
5307     bool isVidBufRequested = false;
5308     camera3_stream_buffer_t *pInputBuffer = NULL;
5309 
5310     // If Easel is thermal throttled and there is no pending HDR+ request,
5311     // close HDR+ client.
5312     {
5313         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5314         if (gHdrPlusClient != nullptr && mEaselThermalThrottled) {
5315             Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5316             if (mHdrPlusPendingRequests.empty()) {
5317                 closeHdrPlusClientLocked();
5318             }
5319         }
5320     }
5321 
5322     pthread_mutex_lock(&mMutex);
5323 
5324     // Validate current state
5325     switch (mState) {
5326         case CONFIGURED:
5327         case STARTED:
5328             /* valid state */
5329             break;
5330 
5331         case ERROR:
5332             pthread_mutex_unlock(&mMutex);
5333             handleCameraDeviceError();
5334             return -ENODEV;
5335 
5336         default:
5337             LOGE("Invalid state %d", mState);
5338             pthread_mutex_unlock(&mMutex);
5339             return -ENODEV;
5340     }
5341 
5342     rc = validateCaptureRequest(request, internallyRequestedStreams);
5343     if (rc != NO_ERROR) {
5344         LOGE("incoming request is not valid");
5345         pthread_mutex_unlock(&mMutex);
5346         return rc;
5347     }
5348 
5349     meta = request->settings;
5350 
5351     if (mState == CONFIGURED) {
5352         logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
5353 
5354         // For HFR first capture request, send capture intent, and
5355         // stream on all streams
5356         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) && mBatchSize) {
5357             int32_t hal_version = CAM_HAL_V3;
5358             uint8_t captureIntent = meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5359             clear_metadata_buffer(mParameters);
5360             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
5361             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
5362             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
5363             if (rc < 0) {
5364                 LOGE("set_parms for for capture intent failed");
5365                 pthread_mutex_unlock(&mMutex);
5366                 return rc;
5367             }
5368         }
5369 
5370         uint8_t nrMode = 0;
5371         if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5372             nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5373         }
5374 
5375         cam_is_type_t is_type = IS_TYPE_NONE;
5376         bool setEis = isEISEnabled(meta);
5377         cam_sensor_mode_info_t sensorModeInfo = {};
5378         rc = getSensorModeInfo(sensorModeInfo);
5379         if (rc != NO_ERROR) {
5380             LOGE("Failed to get sensor output size");
5381             pthread_mutex_unlock(&mMutex);
5382             goto error_exit;
5383         }
5384 
5385         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5386                 gCamCapability[mCameraId]->active_array_size.height,
5387                 sensorModeInfo.active_array_size.width,
5388                 sensorModeInfo.active_array_size.height);
5389 
5390         /* Set batchmode before initializing channel. Since registerBuffer
5391          * internally initializes some of the channels, better set batchmode
5392          * even before first register buffer */
5393         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5394             it != mStreamInfo.end(); it++) {
5395             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5396             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5397                     && mBatchSize) {
5398                 rc = channel->setBatchSize(mBatchSize);
5399                 //Disable per frame map unmap for HFR/batchmode case
5400                 rc |= channel->setPerFrameMapUnmap(false);
5401                 if (NO_ERROR != rc) {
5402                     LOGE("Channel init failed %d", rc);
5403                     pthread_mutex_unlock(&mMutex);
5404                     goto error_exit;
5405                 }
5406             }
5407         }
5408 
5409         //First initialize all streams
5410         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5411             it != mStreamInfo.end(); it++) {
5412             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5413 
5414             /* Initial value of NR mode is needed before stream on */
5415             channel->setNRMode(nrMode);
5416             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5417                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
5418                setEis) {
5419                 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5420                     if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5421                         is_type = mStreamConfigInfo.is_type[i];
5422                         break;
5423                     }
5424                 }
5425                 rc = channel->initialize(is_type);
5426             } else {
5427                 rc = channel->initialize(IS_TYPE_NONE);
5428             }
5429             if (NO_ERROR != rc) {
5430                 LOGE("Channel initialization failed %d", rc);
5431                 pthread_mutex_unlock(&mMutex);
5432                 goto error_exit;
5433             }
5434         }
5435 
        // Initialize the optional internal channels (none of these are
        // app-visible streams, so all run without image stabilization).
        if (mRawDumpChannel) {
            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
            if (rc != NO_ERROR) {
                LOGE("Error: Raw Dump Channel init failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        if (mHdrPlusRawSrcChannel) {
            rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
            if (rc != NO_ERROR) {
                LOGE("Error: HDR+ RAW Source Channel init failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        if (mSupportChannel) {
            rc = mSupportChannel->initialize(IS_TYPE_NONE);
            if (rc < 0) {
                LOGE("Support channel initialization failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        if (mAnalysisChannel) {
            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
            if (rc < 0) {
                LOGE("Analysis channel initialization failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        // The dummy batch channel also needs the batch size before init so its
        // stream matches the real video stream's batching behavior.
        if (mDummyBatchChannel) {
            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
            if (rc < 0) {
                LOGE("mDummyBatchChannel setBatchSize failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
            rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
            if (rc < 0) {
                LOGE("mDummyBatchChannel initialization failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        // Set bundle info
        rc = setBundleInfo();
        if (rc < 0) {
            LOGE("setBundleInfo failed %d", rc);
            pthread_mutex_unlock(&mMutex);
            goto error_exit;
        }
5490 
        // Dual-camera: pick up the link-related vendor tags from the request
        // settings (link enable, main/aux role, peer camera id).
        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
        }
        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
        }
        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);

            // NOTE(review): the second condition uses && — a peer id that is
            // out of range but equal to mCameraId would pass; presumably the
            // intent is "out of range OR self-link is invalid". Confirm before
            // changing, since the check text predates this review.
            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
                (mLinkedCameraId != mCameraId) ) {
                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
                    mLinkedCameraId, mCameraId);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        // add bundle related cameras
        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
            // Fill the related-sensors sync bundle and send it to the backend.
            cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                    &m_pDualCamCmdPtr->bundle_info;
            m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
            if (mIsDeviceLinked)
                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
            else
                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;

            // gCamLock guards the global per-camera sessionId table.
            pthread_mutex_lock(&gCamLock);

            // 0xDEADBEEF marks an unset/invalid session slot for the peer camera.
            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
                LOGE("Dualcam: Invalid Session Id ");
                pthread_mutex_unlock(&gCamLock);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }

            if (mIsMainCamera == 1) {
                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
                m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
                m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
                // related session id should be session id of linked session
                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
            } else {
                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
                m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
                m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
            }
            m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
            pthread_mutex_unlock(&gCamLock);

            rc = mCameraHandle->ops->set_dual_cam_cmd(
                    mCameraHandle->camera_handle);
            if (rc < 0) {
                LOGE("Dualcam: link failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        goto no_error;
// Common failure exit for the first-request configuration phase above.
// NOTE(review): some paths jump here without assigning rc (e.g. the invalid
// linked-camera-id and invalid-session-id checks), so a stale NO_ERROR rc
// could be returned — verify each goto sets rc before relying on it.
error_exit:
        mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
        return rc;
no_error:
        // First request after configuration completed successfully.
        mWokenUpByDaemon = false;
        mPendingLiveRequest = 0;
        mFirstConfiguration = false;
    }

    uint32_t frameNumber = request->frame_number;
    cam_stream_ID_t streamsArray;

    if (mFlushPerf) {
        //we cannot accept any requests during flush
        LOGE("process_capture_request cannot proceed during flush");
        pthread_mutex_unlock(&mMutex);
        return NO_ERROR; //should return an error
    }
5577 
    // Resolve the app request id: take it from this request's metadata when
    // present, otherwise reuse the previous one (only valid after the first
    // fully-specified request).
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        LOGD("Received request with id: %d", request_id);
    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
        LOGE("Unable to find request id field, \
                & no previous id available");
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        LOGD("Re-using old request id");
        request_id = mCurrentRequestId;
    }

    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
                                    request->num_output_buffers,
                                    request->input_buffer,
                                    frameNumber);
    // Acquire all request buffers first
    streamsArray.num_streams = 0;
    int blob_request = 0;
    bool depthRequestPresent = false;
    uint32_t snapshotStreamId = 0;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

        // A BLOB buffer that is not depth-formatted is a JPEG snapshot request.
        if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
                (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
            //FIXME??:Call function to store local copy of jpeg data for encode params.
            blob_request = 1;
            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
        }

        // Wait for (and consume) the acquire fence before the HAL may touch
        // the buffer; the fence fd is closed in all cases.
        if (output.acquire_fence != -1) {
           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
           close(output.acquire_fence);
           if (rc != OK) {
              LOGE("sync wait failed %d", rc);
              pthread_mutex_unlock(&mMutex);
              return rc;
           }
        }

        // Depth BLOB buffers are handled by the depth channel separately and
        // are not added to the backend stream request list.
        if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
                (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
            depthRequestPresent = true;
            continue;
        }

        streamsArray.stream_request[streamsArray.num_streams++].streamID =
            channel->getStreamID(channel->getStreamTypeMask());

        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
            isVidBufRequested = true;
        }
    }

    //FIXME: Add checks to ensure no dups in validateCaptureRequest
    for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
          itr++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
        streamsArray.stream_request[streamsArray.num_streams++].streamID =
            channel->getStreamID(channel->getStreamTypeMask());

        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
            isVidBufRequested = true;
        }
    }

    // Snapshot: raise the perf lock for the duration of the capture, and
    // piggy-back a free-running RAW dump frame if that channel exists.
    if (blob_request) {
        ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
        mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }
    if (blob_request && mRawDumpChannel) {
        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
        streamsArray.stream_request[streamsArray.num_streams].streamID =
            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
        streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
    }
5658 
5659     {
5660         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5661         // Request a RAW buffer if
5662         //  1. mHdrPlusRawSrcChannel is valid.
5663         //  2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5664         //  3. There is no pending HDR+ request.
5665         if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5666                 mHdrPlusPendingRequests.size() == 0) {
5667             streamsArray.stream_request[streamsArray.num_streams].streamID =
5668                 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5669             streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5670         }
5671     }
5672 
5673     //extract capture intent
5674     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5675         mCaptureIntent =
5676                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5677     }
5678 
5679     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5680         mCacMode =
5681                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5682     }
5683 
5684     uint8_t requestedLensShadingMapMode;
5685     // Get the shading map mode.
5686     if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5687         mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5688                 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5689     } else {
5690         requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5691     }
5692 
5693     if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5694         mLastRequestedFaceDetectMode =
5695                 meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5696     }
5697 
5698     if (meta.exists(ANDROID_STATISTICS_OIS_DATA_MODE)) {
5699         mLastRequestedOisDataMode =
5700                 meta.find(ANDROID_STATISTICS_OIS_DATA_MODE).data.u8[0];
5701     }
5702 
    bool hdrPlusRequest = false;
    HdrPlusPendingRequest pendingHdrPlusRequest = {};

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        // If this request has a still capture intent, try to submit an HDR+ request.
        if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
                mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
            hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
        }
    }

    if (hdrPlusRequest) {
        // For a HDR+ request, just set the frame parameters.
        rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
        if (rc < 0) {
            LOGE("fail to set frame parameters");
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    } else if(request->input_buffer == NULL) {
        /* Parse the settings:
         * - For every request in NORMAL MODE
         * - For every request in HFR mode during preview only case
         * - For first request of every batch in HFR mode during video
         * recording. In batchmode the same settings except frame number is
         * repeated in each request of the batch.
         */
        if (!mBatchSize ||
           (mBatchSize && !isVidBufRequested) ||
           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
            rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
            if (rc < 0) {
                LOGE("fail to set frame parameters");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }

            {
                // If HDR+ mode is enabled, override the following modes so the necessary metadata
                // will be included in the result metadata sent to Easel HDR+.
                std::unique_lock<std::mutex> l(gHdrPlusClientLock);
                if (mHdrPlusModeEnabled) {
                    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
                        ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
                }
            }
        }
        /* For batchMode HFR, setFrameParameters is not called for every
         * request. But only frame number of the latest request is parsed.
         * Keep track of first and last frame numbers in a batch so that
         * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
        if (mBatchSize) {
            if (!mToBeQueuedVidBufs) {
                //start of the batch
                mFirstFrameNumberInBatch = request->frame_number;
            }
            // Frame number must always be refreshed, even when the rest of the
            // settings are reused within a batch.
            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
                LOGE("Failed to set the frame number in the parameters");
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
        }
        if (mNeedSensorRestart) {
            /* Unlock the mutex as restartSensor waits on the channels to be
             * stopped, which in turn calls stream callback functions -
             * handleBufferWithLock and handleMetadataWithLock */
            pthread_mutex_unlock(&mMutex);
            rc = dynamicUpdateMetaStreamInfo();
            if (rc != NO_ERROR) {
                LOGE("Restarting the sensor failed");
                return BAD_VALUE;
            }
            mNeedSensorRestart = false;
            pthread_mutex_lock(&mMutex);
        }
        // One-shot restore of normal AEC convergence after an instant-AEC burst.
        if(mResetInstantAEC) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
            mResetInstantAEC = false;
        }
    } else {
        // Reprocess request: wait for the input buffer's acquire fence before use.
        if (request->input_buffer->acquire_fence != -1) {
           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
           close(request->input_buffer->acquire_fence);
           if (rc != OK) {
              LOGE("input buffer sync wait failed %d", rc);
              pthread_mutex_unlock(&mMutex);
              return rc;
           }
        }
    }
5799 
    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
        mLastCustIntentFrmNum = frameNumber;
    }
    /* Update pending request list and pending buffers map */
    PendingRequestInfo pendingRequest = {};
    pendingRequestIterator latestRequest;
    pendingRequest.frame_number = frameNumber;
    // Depth buffers are returned out-of-band, so exclude them from the count
    // of buffers this request owes the framework.
    pendingRequest.num_buffers = depthRequestPresent ?
            (request->num_output_buffers - 1 ) : request->num_output_buffers;
    pendingRequest.request_id = request_id;
    pendingRequest.blob_request = blob_request;
    pendingRequest.timestamp = 0;
    pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
    pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
    pendingRequest.requestedOisDataMode = mLastRequestedOisDataMode;
    pendingRequest.zoomRatio = mLastRequestedZoomRatio;
    if (request->input_buffer) {
        // Deep-copy the input buffer struct; freed when the request completes.
        pendingRequest.input_buffer =
                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
        *(pendingRequest.input_buffer) = *(request->input_buffer);
        pInputBuffer = pendingRequest.input_buffer;
    } else {
       pendingRequest.input_buffer = NULL;
       pInputBuffer = NULL;
    }
    pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);

    pendingRequest.pipeline_depth = 0;
    pendingRequest.partial_result_cnt = 0;
    extractJpegMetadata(mCurJpegMeta, request);
    pendingRequest.jpegMetadata = mCurJpegMeta;
    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
    pendingRequest.capture_intent = mCaptureIntent;
    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
        pendingRequest.hybrid_ae_enable =
                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
    }

    if (meta.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
        pendingRequest.motion_detection_enable =
                meta.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8[0];
    }

    /* DevCamDebug metadata processCaptureRequest */
    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
        mDevCamDebugMetaEnable =
                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
    }
    pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
    /* DevCamDebug metadata end */

    //extract CAC info
    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        mCacMode =
                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
    }
    pendingRequest.fwkCacMode = mCacMode;
    pendingRequest.hdrplus = hdrPlusRequest;
    // We need to account for several dropped frames initially on sensor side.
    pendingRequest.expectedFrameDuration = (mState == CONFIGURED) ? (4 * mExpectedFrameDuration) :
        mExpectedFrameDuration;
    mExpectedInflightDuration += pendingRequest.expectedFrameDuration;

    // extract enableZsl info
    if (gExposeEnableZslKey) {
        if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
            pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
            mZslEnabled = pendingRequest.enableZsl;
        } else {
            pendingRequest.enableZsl = mZslEnabled;
        }
    }

    PendingBuffersInRequest bufsForCurRequest;
    bufsForCurRequest.frame_number = frameNumber;
    // Mark current timestamp for the new request
    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
    bufsForCurRequest.av_timestamp = 0;

    if (hdrPlusRequest) {
        // Save settings for this request.
        pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
        memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));

        // Add to pending HDR+ request queue.
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);

        ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
    }
5890 
    // Record every non-depth output buffer in the pending-buffers bookkeeping;
    // the depth buffer (at most one expected) is remembered separately.
    buffer_handle_t *depth_buffer = nullptr;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        if ((request->output_buffers[i].stream->data_space ==
                HAL_DATASPACE_DEPTH) &&
                (HAL_PIXEL_FORMAT_BLOB ==
                        request->output_buffers[i].stream->format)) {
            depth_buffer = request->output_buffers[i].buffer;
            continue;
        }
        RequestedBufferInfo requestedBuf;
        memset(&requestedBuf, 0, sizeof(requestedBuf));
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        // Add to buffer handle the pending buffers list
        PendingBufferInfo bufferInfo;
        bufferInfo.buffer = request->output_buffers[i].buffer;
        bufferInfo.stream = request->output_buffers[i].stream;
        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
            frameNumber, bufferInfo.buffer,
            channel->getStreamTypeMask(), bufferInfo.stream->format);
    }
    // Add this request packet into mPendingBuffersMap
    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
        mPendingBuffersMap.get_num_overall_buffers());

    latestRequest = mPendingRequestsList.insert(
            mPendingRequestsList.end(), pendingRequest);

    // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
    // for the frame number.
    mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr,
            isStillZsl(pendingRequest));
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
    }

    // Flush in progress: accept the request into the bookkeeping above (so it
    // is failed/returned by the flush path) but do not submit it downstream.
    if(mFlush) {
        LOGI("mFlush is true");

        // If depth buffer is requested, return an error depth buffer. The buffer is not
        // going to be added to the depth channel so it won't be returned in
        // notifyErrorFoPendingDepthData().
        if (depth_buffer != nullptr) {
            camera3_stream_buffer_t errorBuffer =
            {
                .stream = mDepthChannel->getStream(),
                .buffer = depth_buffer,
                .status = CAMERA3_BUFFER_STATUS_ERROR,
                .acquire_fence = -1,
                .release_fence = -1,
            };

            mOutputBufferDispatcher.markBufferReady(frameNumber, errorBuffer);
        }

        pthread_mutex_unlock(&mMutex);
        return NO_ERROR;
    }
5954 
    // If this is not an HDR+ request, send the request to metadata and each output buffer's
    // channel.
    if (!hdrPlusRequest) {
        int indexUsed;
        // Notify metadata channel we receive a request
        mMetadataChannel->request(NULL, frameNumber, indexUsed);

        if(request->input_buffer != NULL){
            LOGD("Input request, frame_number %d", frameNumber);
            rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
            if (NO_ERROR != rc) {
                LOGE("fail to set reproc parameters");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        // Call request on other streams
        uint32_t streams_need_metadata = 0;
        // pendingBufferIter walks latestRequest->buffers in lockstep with the
        // output-buffer loop below (depth buffers were excluded from both).
        pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
        for (size_t i = 0; i < request->num_output_buffers; i++) {
            const camera3_stream_buffer_t& output = request->output_buffers[i];
            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

            if (channel == NULL) {
                LOGW("invalid channel pointer for stream");
                continue;
            }

            if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
                // BLOB (JPEG or depth) output.
                LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
                          output.buffer, request->input_buffer, frameNumber);
                if(request->input_buffer != NULL){
                    // Reprocess snapshot: feed the input buffer + reproc meta.
                    rc = channel->request(output.buffer, frameNumber,
                            pInputBuffer, &mReprocMeta, indexUsed, false, false);
                    if (rc < 0) {
                        LOGE("Fail to request on picture channel");
                        pthread_mutex_unlock(&mMutex);
                        return rc;
                    }
                } else {
                    if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
                        assert(NULL != mDepthChannel);
                        assert(mDepthChannel == output.stream->priv);

                        // Depth output only needs its buffer mapped; it is
                        // filled from point-cloud data, not a backend stream.
                        rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
                        if (rc < 0) {
                            LOGE("Fail to map on depth buffer");
                            pthread_mutex_unlock(&mMutex);
                            return rc;
                        }
                        continue;
                    } else {
                        // Regular (non-reprocess) JPEG snapshot.
                        LOGD("snapshot request with buffer %p, frame_number %d",
                                 output.buffer, frameNumber);
                        if (!request->settings) {
                            rc = channel->request(output.buffer, frameNumber,
                                    NULL, mPrevParameters, indexUsed);
                        } else {
                            rc = channel->request(output.buffer, frameNumber,
                                    NULL, mParameters, indexUsed);
                        }
                        if (rc < 0) {
                            LOGE("Fail to request on picture channel");
                            pthread_mutex_unlock(&mMutex);
                            return rc;
                        }

                        // Record which backend buffer index serves this stream
                        // (free-running in constrained high-speed mode).
                        uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
                        uint32_t j = 0;
                        for (j = 0; j < streamsArray.num_streams; j++) {
                            if (streamsArray.stream_request[j].streamID == streamId) {
                                if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                                    streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
                                else
                                    streamsArray.stream_request[j].buf_index = indexUsed;
                                break;
                            }
                        }
                        if (j == streamsArray.num_streams) {
                            LOGE("Did not find matching stream to update index");
                            assert(0);
                        }

                        // JPEG encode needs the frame's metadata before the
                        // buffer can be returned.
                        pendingBufferIter->need_metadata = true;

                        if (isEISCropInSnapshotNeeded(meta)) {
                            pendingBufferIter->need_crop = true;
                            pendingBufferIter->crop_info = mLastEISCropInfo;
                        }

                        streams_need_metadata++;
                    }
                }
            } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                    output.stream->format == HAL_PIXEL_FORMAT_Y8) {
                // YUV/Y8 output: the channel decides whether it needs metadata
                // (e.g. when used as a reprocess source) via needMetadata.
                bool needMetadata = false;
                QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
                rc = yuvChannel->request(output.buffer, frameNumber,
                        pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
                        needMetadata, indexUsed, false, false);
                if (rc < 0) {
                    LOGE("Fail to request on YUV channel");
                    pthread_mutex_unlock(&mMutex);
                    return rc;
                }

                uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
                uint32_t j = 0;
                for (j = 0; j < streamsArray.num_streams; j++) {
                    if (streamsArray.stream_request[j].streamID == streamId) {
                        if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                            streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
                        else
                            streamsArray.stream_request[j].buf_index = indexUsed;
                        break;
                    }
                }
                if (j == streamsArray.num_streams) {
                    LOGE("Did not find matching stream to update index");
                    assert(0);
                }

                pendingBufferIter->need_metadata = needMetadata;
                if (needMetadata)
                    streams_need_metadata += 1;
                LOGD("calling YUV channel request, need_metadata is %d",
                         needMetadata);
            } else {
                // All other stream formats (preview, video, RAW, ...).
                LOGD("request with buffer %p, frame_number %d",
                      output.buffer, frameNumber);

                rc = channel->request(output.buffer, frameNumber, indexUsed);

                uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
                uint32_t j = 0;
                for (j = 0; j < streamsArray.num_streams; j++) {
                    if (streamsArray.stream_request[j].streamID == streamId) {
                        if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                            streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
                        else
                            streamsArray.stream_request[j].buf_index = indexUsed;
                        break;
                    }
                }
                if (j == streamsArray.num_streams) {
                    LOGE("Did not find matching stream to update index");
                    assert(0);
                }

                // HFR batching: queue the batch once mBatchSize video buffers
                // have been accumulated.
                if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
                        && mBatchSize) {
                    mToBeQueuedVidBufs++;
                    if (mToBeQueuedVidBufs == mBatchSize) {
                        channel->queueBatchBuf();
                    }
                }
                if (rc < 0) {
                    LOGE("request failed");
                    pthread_mutex_unlock(&mMutex);
                    return rc;
                }
            }
            pendingBufferIter++;
        }
6120 
6121         for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
6122               itr++) {
6123             QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
6124 
6125             if (channel == NULL) {
6126                 LOGE("invalid channel pointer for stream");
6127                 assert(0);
6128                 pthread_mutex_unlock(&mMutex);
6129                 return BAD_VALUE;
6130             }
6131 
6132             InternalRequest requestedStream;
6133             requestedStream = (*itr);
6134 
6135 
6136             if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
6137                 LOGD("snapshot request internally input buffer %p, frame_number %d",
6138                           request->input_buffer, frameNumber);
6139                 if(request->input_buffer != NULL){
6140                     rc = channel->request(NULL, frameNumber,
6141                             pInputBuffer, &mReprocMeta, indexUsed, true,
6142                             requestedStream.meteringOnly);
6143                     if (rc < 0) {
6144                         LOGE("Fail to request on picture channel");
6145                         pthread_mutex_unlock(&mMutex);
6146                         return rc;
6147                     }
6148                 } else {
6149                     LOGD("snapshot request with frame_number %d", frameNumber);
6150                     if (!request->settings) {
6151                         rc = channel->request(NULL, frameNumber,
6152                                 NULL, mPrevParameters, indexUsed, true,
6153                                 requestedStream.meteringOnly);
6154                     } else {
6155                         rc = channel->request(NULL, frameNumber,
6156                                 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
6157                     }
6158                     if (rc < 0) {
6159                         LOGE("Fail to request on picture channel");
6160                         pthread_mutex_unlock(&mMutex);
6161                         return rc;
6162                     }
6163 
6164                     if ((*itr).meteringOnly != 1) {
6165                         requestedStream.need_metadata = 1;
6166                         streams_need_metadata++;
6167                     }
6168                 }
6169 
6170                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6171                 uint32_t j = 0;
6172                 for (j = 0; j < streamsArray.num_streams; j++) {
6173                     if (streamsArray.stream_request[j].streamID == streamId) {
6174                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6175                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6176                       else
6177                           streamsArray.stream_request[j].buf_index = indexUsed;
6178                         break;
6179                     }
6180                 }
6181                 if (j == streamsArray.num_streams) {
6182                     LOGE("Did not find matching stream to update index");
6183                     assert(0);
6184                 }
6185 
6186             } else {
6187                 LOGE("Internal requests not supported on this stream type");
6188                 assert(0);
6189                 pthread_mutex_unlock(&mMutex);
6190                 return INVALID_OPERATION;
6191             }
6192             latestRequest->internalRequestList.push_back(requestedStream);
6193         }
6194 
6195         //If 2 streams have need_metadata set to true, fail the request, unless
6196         //we copy/reference count the metadata buffer
6197         if (streams_need_metadata > 1) {
6198             LOGE("not supporting request in which two streams requires"
6199                     " 2 HAL metadata for reprocessing");
6200             pthread_mutex_unlock(&mMutex);
6201             return -EINVAL;
6202         }
6203 
6204         cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
6205                 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
6206         if (depthRequestPresent && mDepthChannel) {
6207             if (request->settings) {
6208                 camera_metadata_ro_entry entry;
6209                 if (find_camera_metadata_ro_entry(request->settings,
6210                         NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
6211                     if (entry.data.u8[0]) {
6212                         pdafEnable = CAM_PD_DATA_ENABLED;
6213                     } else {
6214                         pdafEnable = CAM_PD_DATA_SKIP;
6215                     }
6216                     mDepthCloudMode = pdafEnable;
6217                 } else {
6218                     pdafEnable = mDepthCloudMode;
6219                 }
6220             } else {
6221                 pdafEnable = mDepthCloudMode;
6222             }
6223         }
6224 
6225         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
6226                 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
6227             LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
6228             pthread_mutex_unlock(&mMutex);
6229             return BAD_VALUE;
6230         }
6231 
6232         if (request->input_buffer == NULL) {
6233             /* Set the parameters to backend:
6234              * - For every request in NORMAL MODE
6235              * - For every request in HFR mode during preview only case
6236              * - Once every batch in HFR mode during video recording
6237              */
6238             if (!mBatchSize ||
6239                (mBatchSize && !isVidBufRequested) ||
6240                (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
6241                 LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
6242                          mBatchSize, isVidBufRequested,
6243                         mToBeQueuedVidBufs);
6244 
6245                 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
6246                     for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6247                         uint32_t m = 0;
6248                         for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6249                             if (streamsArray.stream_request[k].streamID ==
6250                                     mBatchedStreamsArray.stream_request[m].streamID)
6251                                 break;
6252                             }
6253                             if (m == mBatchedStreamsArray.num_streams) {
6254                                 mBatchedStreamsArray.stream_request\
6255                                     [mBatchedStreamsArray.num_streams].streamID =
6256                                     streamsArray.stream_request[k].streamID;
6257                                 mBatchedStreamsArray.stream_request\
6258                                     [mBatchedStreamsArray.num_streams].buf_index =
6259                                     streamsArray.stream_request[k].buf_index;
6260                                 mBatchedStreamsArray.num_streams =
6261                                     mBatchedStreamsArray.num_streams + 1;
6262                             }
6263                     }
6264                     streamsArray = mBatchedStreamsArray;
6265                 }
6266                 /* Update stream id of all the requested buffers */
6267                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6268                         streamsArray)) {
6269                     LOGE("Failed to set stream type mask in the parameters");
6270                     pthread_mutex_unlock(&mMutex);
6271                     return BAD_VALUE;
6272                 }
6273 
6274                 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6275                         mParameters);
6276                 if (rc < 0) {
6277                     LOGE("set_parms failed");
6278                 }
6279                 /* reset to zero coz, the batch is queued */
6280                 mToBeQueuedVidBufs = 0;
6281                 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6282                 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6283             } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
6284                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6285                     uint32_t m = 0;
6286                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6287                         if (streamsArray.stream_request[k].streamID ==
6288                                 mBatchedStreamsArray.stream_request[m].streamID)
6289                             break;
6290                     }
6291                     if (m == mBatchedStreamsArray.num_streams) {
6292                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6293                                 streamID = streamsArray.stream_request[k].streamID;
6294                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6295                                 buf_index = streamsArray.stream_request[k].buf_index;
6296                         mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6297                     }
6298                 }
6299             }
6300             mPendingLiveRequest++;
6301 
6302             // Start all streams after the first setting is sent, so that the
6303             // setting can be applied sooner: (0 + apply_delay)th frame.
6304             if (mState == CONFIGURED && mChannelHandle) {
6305                 //Then start them.
6306                 LOGH("Start META Channel");
6307                 rc = mMetadataChannel->start();
6308                 if (rc < 0) {
6309                     LOGE("META channel start failed");
6310                     pthread_mutex_unlock(&mMutex);
6311                     return rc;
6312                 }
6313 
6314                 if (mAnalysisChannel) {
6315                     rc = mAnalysisChannel->start();
6316                     if (rc < 0) {
6317                         LOGE("Analysis channel start failed");
6318                         mMetadataChannel->stop();
6319                         pthread_mutex_unlock(&mMutex);
6320                         return rc;
6321                     }
6322                 }
6323 
6324                 if (mSupportChannel) {
6325                     rc = mSupportChannel->start();
6326                     if (rc < 0) {
6327                         LOGE("Support channel start failed");
6328                         mMetadataChannel->stop();
6329                         /* Although support and analysis are mutually exclusive today
6330                            adding it in anycase for future proofing */
6331                         if (mAnalysisChannel) {
6332                             mAnalysisChannel->stop();
6333                         }
6334                         pthread_mutex_unlock(&mMutex);
6335                         return rc;
6336                     }
6337                 }
6338                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6339                         it != mStreamInfo.end(); it++) {
6340                     QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6341                     LOGH("Start Processing Channel mask=%d",
6342                             channel->getStreamTypeMask());
6343                     rc = channel->start();
6344                     if (rc < 0) {
6345                         LOGE("channel start failed");
6346                         pthread_mutex_unlock(&mMutex);
6347                         return rc;
6348                     }
6349                 }
6350 
6351                 if (mRawDumpChannel) {
6352                     LOGD("Starting raw dump stream");
6353                     rc = mRawDumpChannel->start();
6354                     if (rc != NO_ERROR) {
6355                         LOGE("Error Starting Raw Dump Channel");
6356                         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6357                               it != mStreamInfo.end(); it++) {
6358                             QCamera3Channel *channel =
6359                                 (QCamera3Channel *)(*it)->stream->priv;
6360                             LOGH("Stopping Processing Channel mask=%d",
6361                                 channel->getStreamTypeMask());
6362                             channel->stop();
6363                         }
6364                         if (mSupportChannel)
6365                             mSupportChannel->stop();
6366                         if (mAnalysisChannel) {
6367                             mAnalysisChannel->stop();
6368                         }
6369                         mMetadataChannel->stop();
6370                         pthread_mutex_unlock(&mMutex);
6371                         return rc;
6372                     }
6373                 }
6374 
6375                 // Configure modules for stream on.
6376                 rc = startChannelLocked();
6377                 if (rc != NO_ERROR) {
6378                     LOGE("startChannelLocked failed %d", rc);
6379                     pthread_mutex_unlock(&mMutex);
6380                     return rc;
6381                 }
6382             }
6383         }
6384     }
6385 
6386     // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
6387     {
6388         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6389         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
6390                 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6391                 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6392                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6393                 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6394                 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6395                 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
6396 
6397             if (isSessionHdrPlusModeCompatible()) {
6398                 rc = enableHdrPlusModeLocked();
6399                 if (rc != OK) {
6400                     LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6401                     pthread_mutex_unlock(&mMutex);
6402                     return rc;
6403                 }
6404             }
6405 
6406             mFirstPreviewIntentSeen = true;
6407         }
6408     }
6409 
6410     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6411 
6412     mState = STARTED;
6413     // Added a timed condition wait
6414     struct timespec ts;
6415     uint8_t isValidTimeout = 1;
6416     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
6417     if (rc < 0) {
6418       isValidTimeout = 0;
6419       LOGE("Error reading the real time clock!!");
6420     }
6421     else {
6422       // Make timeout as 5 sec for request to be honored
6423       int64_t timeout = 5;
6424       {
6425           Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6426           // If there is a pending HDR+ request, the following requests may be blocked until the
6427           // HDR+ request is done. So allow a longer timeout.
6428           if (mHdrPlusPendingRequests.size() > 0) {
6429               timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6430           }
6431       }
6432       ts.tv_sec += timeout;
6433     }
6434     //Block on conditional variable
6435     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
6436             (mState != ERROR) && (mState != DEINIT)) {
6437         if (!isValidTimeout) {
6438             LOGD("Blocking on conditional wait");
6439             pthread_cond_wait(&mRequestCond, &mMutex);
6440         }
6441         else {
6442             LOGD("Blocking on timed conditional wait");
6443             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6444             if (rc == ETIMEDOUT) {
6445                 rc = -ENODEV;
6446                 LOGE("Unblocked on timeout!!!!");
6447                 break;
6448             }
6449         }
6450         LOGD("Unblocked");
6451         if (mWokenUpByDaemon) {
6452             mWokenUpByDaemon = false;
6453             if (mPendingLiveRequest < mMaxInFlightRequests)
6454                 break;
6455         }
6456     }
6457     pthread_mutex_unlock(&mMutex);
6458 
6459     return rc;
6460 }
6461 
startChannelLocked()6462 int32_t QCamera3HardwareInterface::startChannelLocked()
6463 {
6464     // Configure modules for stream on.
6465     int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6466             mChannelHandle, /*start_sensor_streaming*/false);
6467     if (rc != NO_ERROR) {
6468         LOGE("start_channel failed %d", rc);
6469         return rc;
6470     }
6471 
6472     {
6473         // Configure Easel for stream on.
6474         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6475         if (EaselManagerClientOpened) {
6476             // Now that sensor mode should have been selected, get the selected sensor mode
6477             // info.
6478             memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6479             rc = getCurrentSensorModeInfo(mSensorModeInfo);
6480             if (rc != NO_ERROR) {
6481                 ALOGE("%s: Get current sensor mode failed, bail out: %s (%d).", __FUNCTION__,
6482                         strerror(-rc), rc);
6483                 return rc;
6484             }
6485             logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6486             rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6487                     /*enableCapture*/true);
6488             if (rc != OK) {
6489                 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6490                         mCameraId, mSensorModeInfo.op_pixel_clk);
6491                 return rc;
6492             }
6493             logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6494             mEaselMipiStarted = true;
6495         }
6496     }
6497 
6498     // Start sensor streaming.
6499     rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6500             mChannelHandle);
6501     if (rc != NO_ERROR) {
6502         LOGE("start_sensor_stream_on failed %d", rc);
6503         return rc;
6504     }
6505 
6506     return 0;
6507 }
6508 
stopChannelLocked(bool stopChannelImmediately)6509 void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6510 {
6511     mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6512             mChannelHandle, stopChannelImmediately);
6513 
6514     {
6515         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6516         if (EaselManagerClientOpened && mEaselMipiStarted) {
6517             int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6518             if (rc != 0) {
6519                 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6520             }
6521             mEaselMipiStarted = false;
6522         }
6523     }
6524 }
6525 
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps pending-request, pending-buffer and pending-frame-drop
 *              state to the given file descriptor, and flags the debug level
 *              for a refresh on the next request.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump output to
 *
 * RETURN     : None
 *==========================================================================*/
dump(int fd)6536 void QCamera3HardwareInterface::dump(int fd)
6537 {
6538     pthread_mutex_lock(&mMutex);
6539     dprintf(fd, "\n Camera HAL3 information Begin \n");
6540 
6541     dprintf(fd, "\nNumber of pending requests: %zu \n",
6542         mPendingRequestsList.size());
6543     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6544     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
6545     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6546     for(pendingRequestIterator i = mPendingRequestsList.begin();
6547             i != mPendingRequestsList.end(); i++) {
6548         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6549         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6550         i->input_buffer);
6551     }
6552     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6553                 mPendingBuffersMap.get_num_overall_buffers());
6554     dprintf(fd, "-------+------------------\n");
6555     dprintf(fd, " Frame | Stream type mask \n");
6556     dprintf(fd, "-------+------------------\n");
6557     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6558         for(auto &j : req.mPendingBufferList) {
6559             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6560             dprintf(fd, " %5d | %11d \n",
6561                     req.frame_number, channel->getStreamTypeMask());
6562         }
6563     }
6564     dprintf(fd, "-------+------------------\n");
6565 
6566     dprintf(fd, "\nPending frame drop list: %zu\n",
6567         mPendingFrameDropList.size());
6568     dprintf(fd, "-------+-----------\n");
6569     dprintf(fd, " Frame | Stream ID \n");
6570     dprintf(fd, "-------+-----------\n");
6571     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6572         i != mPendingFrameDropList.end(); i++) {
6573         dprintf(fd, " %5d | %9d \n",
6574             i->frame_number, i->stream_ID);
6575     }
6576     dprintf(fd, "-------+-----------\n");
6577 
6578     dprintf(fd, "\n Camera HAL3 information End \n");
6579 
6580     /* use dumpsys media.camera as trigger to send update debug level event */
6581     mUpdateDebugLevel = true;
6582     pthread_mutex_unlock(&mMutex);
6583     return;
6584 }
6585 
6586 /*===========================================================================
6587  * FUNCTION   : flush
6588  *
6589  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6590  *              conditionally restarts channels
6591  *
6592  * PARAMETERS :
6593  *  @ restartChannels: re-start all channels
6594  *  @ stopChannelImmediately: stop the channel immediately. This should be used
6595  *                            when device encountered an error and MIPI may has
6596  *                            been stopped.
6597  *
6598  * RETURN     :
6599  *          0 on success
6600  *          Error code on failure
6601  *==========================================================================*/
flush(bool restartChannels,bool stopChannelImmediately)6602 int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
6603 {
6604     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6605     int32_t rc = NO_ERROR;
6606 
6607     LOGD("Unblocking Process Capture Request");
6608     pthread_mutex_lock(&mMutex);
6609     mFlush = true;
6610     pthread_mutex_unlock(&mMutex);
6611 
6612     // Disable HDR+ if it's enabled;
6613     {
6614         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6615         finishHdrPlusClientOpeningLocked(l);
6616         disableHdrPlusModeLocked();
6617     }
6618 
6619     rc = stopAllChannels();
6620     // unlink of dualcam
6621     if (mIsDeviceLinked) {
6622         cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6623                 &m_pDualCamCmdPtr->bundle_info;
6624         m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
6625         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6626         pthread_mutex_lock(&gCamLock);
6627 
6628         if (mIsMainCamera == 1) {
6629             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6630             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
6631             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6632             // related session id should be session id of linked session
6633             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6634         } else {
6635             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6636             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
6637             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6638             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6639         }
6640         m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
6641         pthread_mutex_unlock(&gCamLock);
6642 
6643         rc = mCameraHandle->ops->set_dual_cam_cmd(
6644                 mCameraHandle->camera_handle);
6645         if (rc < 0) {
6646             LOGE("Dualcam: Unlink failed, but still proceed to close");
6647         }
6648     }
6649 
6650     if (rc < 0) {
6651         LOGE("stopAllChannels failed");
6652         return rc;
6653     }
6654     if (mChannelHandle) {
6655         stopChannelLocked(stopChannelImmediately);
6656     }
6657 
6658     // Reset bundle info
6659     rc = setBundleInfo();
6660     if (rc < 0) {
6661         LOGE("setBundleInfo failed %d", rc);
6662         return rc;
6663     }
6664 
6665     // Mutex Lock
6666     pthread_mutex_lock(&mMutex);
6667 
6668     // Unblock process_capture_request
6669     mPendingLiveRequest = 0;
6670     pthread_cond_signal(&mRequestCond);
6671 
6672     rc = notifyErrorForPendingRequests();
6673     if (rc < 0) {
6674         LOGE("notifyErrorForPendingRequests failed");
6675         pthread_mutex_unlock(&mMutex);
6676         return rc;
6677     }
6678 
6679     mFlush = false;
6680 
6681     // Start the Streams/Channels
6682     if (restartChannels) {
6683         rc = startAllChannels();
6684         if (rc < 0) {
6685             LOGE("startAllChannels failed");
6686             pthread_mutex_unlock(&mMutex);
6687             return rc;
6688         }
6689         if (mChannelHandle) {
6690             // Configure modules for stream on.
6691             rc = startChannelLocked();
6692             if (rc < 0) {
6693                 LOGE("startChannelLocked failed");
6694                 pthread_mutex_unlock(&mMutex);
6695                 return rc;
6696             }
6697         }
6698         mFirstPreviewIntentSeen = false;
6699     }
6700     pthread_mutex_unlock(&mMutex);
6701 
6702     return 0;
6703 }
6704 
6705 /*===========================================================================
6706  * FUNCTION   : flushPerf
6707  *
6708  * DESCRIPTION: This is the performance optimization version of flush that does
6709  *              not use stream off, rather flushes the system
6710  *
6711  * PARAMETERS :
6712  *
6713  *
6714  * RETURN     : 0 : success
6715  *              -EINVAL: input is malformed (device is not valid)
6716  *              -ENODEV: if the device has encountered a serious error
6717  *==========================================================================*/
flushPerf()6718 int QCamera3HardwareInterface::flushPerf()
6719 {
6720     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6721     int32_t rc = 0;
6722     struct timespec timeout;
6723     bool timed_wait = false;
6724 
6725     pthread_mutex_lock(&mMutex);
6726     mFlushPerf = true;
6727     mPendingBuffersMap.numPendingBufsAtFlush =
6728         mPendingBuffersMap.get_num_overall_buffers();
6729     LOGD("Calling flush. Wait for %d buffers to return",
6730         mPendingBuffersMap.numPendingBufsAtFlush);
6731 
6732     /* send the flush event to the backend */
6733     rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6734     if (rc < 0) {
6735         LOGE("Error in flush: IOCTL failure");
6736         mFlushPerf = false;
6737         pthread_mutex_unlock(&mMutex);
6738         return -ENODEV;
6739     }
6740 
6741     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6742         LOGD("No pending buffers in HAL, return flush");
6743         mFlushPerf = false;
6744         pthread_mutex_unlock(&mMutex);
6745         return rc;
6746     }
6747 
6748     /* wait on a signal that buffers were received */
6749     rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
6750     if (rc < 0) {
6751         LOGE("Error reading the real time clock, cannot use timed wait");
6752     } else {
6753         timeout.tv_sec += FLUSH_TIMEOUT;
6754         timed_wait = true;
6755     }
6756 
6757     //Block on conditional variable
6758     while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6759         LOGD("Waiting on mBuffersCond");
6760         if (!timed_wait) {
6761             rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6762             if (rc != 0) {
6763                  LOGE("pthread_cond_wait failed due to rc = %s",
6764                         strerror(rc));
6765                  break;
6766             }
6767         } else {
6768             rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6769             if (rc != 0) {
6770                 LOGE("pthread_cond_timedwait failed due to rc = %s",
6771                             strerror(rc));
6772                 break;
6773             }
6774         }
6775     }
6776     if (rc != 0) {
6777         mFlushPerf = false;
6778         pthread_mutex_unlock(&mMutex);
6779         return -ENODEV;
6780     }
6781 
6782     LOGD("Received buffers, now safe to return them");
6783 
6784     //make sure the channels handle flush
6785     //currently only required for the picture channel to release snapshot resources
6786     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6787             it != mStreamInfo.end(); it++) {
6788         QCamera3Channel *channel = (*it)->channel;
6789         if (channel) {
6790             rc = channel->flush();
6791             if (rc) {
6792                LOGE("Flushing the channels failed with error %d", rc);
6793                // even though the channel flush failed we need to continue and
6794                // return the buffers we have to the framework, however the return
6795                // value will be an error
6796                rc = -ENODEV;
6797             }
6798         }
6799     }
6800 
6801     /* notify the frameworks and send errored results */
6802     rc = notifyErrorForPendingRequests();
6803     if (rc < 0) {
6804         LOGE("notifyErrorForPendingRequests failed");
6805         pthread_mutex_unlock(&mMutex);
6806         return rc;
6807     }
6808 
6809     //unblock process_capture_request
6810     mPendingLiveRequest = 0;
6811     unblockRequestIfNecessary();
6812 
6813     mFlushPerf = false;
6814     pthread_mutex_unlock(&mMutex);
6815     LOGD ("Flush Operation complete. rc = %d", rc);
6816     return rc;
6817 }
6818 
6819 /*===========================================================================
6820  * FUNCTION   : handleCameraDeviceError
6821  *
6822  * DESCRIPTION: This function calls internal flush and notifies the error to
6823  *              framework and updates the state variable.
6824  *
6825  * PARAMETERS :
6826  *   @stopChannelImmediately : stop channels immediately without waiting for
6827  *                             frame boundary.
6828  *
6829  * RETURN     : NO_ERROR on Success
6830  *              Error code on failure
6831  *==========================================================================*/
handleCameraDeviceError(bool stopChannelImmediately)6832 int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
6833 {
6834     int32_t rc = NO_ERROR;
6835 
6836     {
6837         Mutex::Autolock lock(mFlushLock);
6838         pthread_mutex_lock(&mMutex);
6839         if (mState != ERROR) {
6840             //if mState != ERROR, nothing to be done
6841             pthread_mutex_unlock(&mMutex);
6842             return NO_ERROR;
6843         }
6844         pthread_mutex_unlock(&mMutex);
6845 
6846         rc = flush(false /* restart channels */, stopChannelImmediately);
6847         if (NO_ERROR != rc) {
6848             LOGE("internal flush to handle mState = ERROR failed");
6849         }
6850 
6851         pthread_mutex_lock(&mMutex);
6852         mState = DEINIT;
6853         pthread_mutex_unlock(&mMutex);
6854     }
6855 
6856     camera3_notify_msg_t notify_msg;
6857     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6858     notify_msg.type = CAMERA3_MSG_ERROR;
6859     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6860     notify_msg.message.error.error_stream = NULL;
6861     notify_msg.message.error.frame_number = 0;
6862     orchestrateNotify(&notify_msg);
6863 
6864     return rc;
6865 }
6866 
/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata_buf  : metadata information; NULL for buffer-only results
 *   @buffer        : actual gralloc buffer to be returned to frameworks.
 *                    NULL if metadata.
 *   @frame_number  : frame number of the request this result belongs to
 *   @isInputBuffer : true if this callback is for a returned input buffer
 *
 * RETURN     : NONE
 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)6880 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6881                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6882 {
6883     if (metadata_buf) {
6884         pthread_mutex_lock(&mMutex);
6885         uint8_t batchSize = mBatchSize;
6886         pthread_mutex_unlock(&mMutex);
6887         if (batchSize) {
6888             handleBatchMetadata(metadata_buf,
6889                     true /* free_and_bufdone_meta_buf */);
6890         } else { /* mBatchSize = 0 */
6891             hdrPlusPerfLock(metadata_buf);
6892             pthread_mutex_lock(&mMutex);
6893             handleMetadataWithLock(metadata_buf,
6894                     true /* free_and_bufdone_meta_buf */,
6895                     true /* last urgent frame of batch metadata */,
6896                     true /* last frame of batch metadata */,
6897                     NULL);
6898             pthread_mutex_unlock(&mMutex);
6899         }
6900     } else if (isInputBuffer) {
6901         pthread_mutex_lock(&mMutex);
6902         handleInputBufferWithLock(frame_number);
6903         pthread_mutex_unlock(&mMutex);
6904     } else {
6905         pthread_mutex_lock(&mMutex);
6906         handleBufferWithLock(buffer, frame_number);
6907         pthread_mutex_unlock(&mMutex);
6908     }
6909     return;
6910 }
6911 
6912 /*===========================================================================
6913  * FUNCTION   : getReprocessibleOutputStreamId
6914  *
6915  * DESCRIPTION: Get source output stream id for the input reprocess stream
6916  *              based on size and format, which would be the largest
6917  *              output stream if an input stream exists.
6918  *
6919  * PARAMETERS :
6920  *   @id      : return the stream id if found
6921  *
6922  * RETURN     : int32_t type of status
6923  *              NO_ERROR  -- success
6924  *              none-zero failure code
6925  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)6926 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6927 {
6928     /* check if any output or bidirectional stream with the same size and format
6929        and return that stream */
6930     if ((mInputStreamInfo.dim.width > 0) &&
6931             (mInputStreamInfo.dim.height > 0)) {
6932         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6933                 it != mStreamInfo.end(); it++) {
6934 
6935             camera3_stream_t *stream = (*it)->stream;
6936             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6937                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6938                     (stream->format == mInputStreamInfo.format)) {
6939                 // Usage flag for an input stream and the source output stream
6940                 // may be different.
6941                 LOGD("Found reprocessible output stream! %p", *it);
6942                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6943                          stream->usage, mInputStreamInfo.usage);
6944 
6945                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6946                 if (channel != NULL && channel->mStreams[0]) {
6947                     id = channel->mStreams[0]->getMyServerID();
6948                     return NO_ERROR;
6949                 }
6950             }
6951         }
6952     } else {
6953         LOGD("No input stream, so no reprocessible output stream");
6954     }
6955     return NAME_NOT_FOUND;
6956 }
6957 
6958 /*===========================================================================
6959  * FUNCTION   : lookupFwkName
6960  *
6961  * DESCRIPTION: In case the enum is not same in fwk and backend
6962  *              make sure the parameter is correctly propogated
6963  *
6964  * PARAMETERS  :
6965  *   @arr      : map between the two enums
6966  *   @len      : len of the map
6967  *   @hal_name : name of the hal_parm to map
6968  *
6969  * RETURN     : int type of status
6970  *              fwk_name  -- success
6971  *              none-zero failure code
6972  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)6973 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6974         size_t len, halType hal_name)
6975 {
6976 
6977     for (size_t i = 0; i < len; i++) {
6978         if (arr[i].hal_name == hal_name) {
6979             return arr[i].fwk_name;
6980         }
6981     }
6982 
6983     /* Not able to find matching framework type is not necessarily
6984      * an error case. This happens when mm-camera supports more attributes
6985      * than the frameworks do */
6986     LOGH("Cannot find matching framework type");
6987     return NAME_NOT_FOUND;
6988 }
6989 
6990 /*===========================================================================
6991  * FUNCTION   : lookupHalName
6992  *
6993  * DESCRIPTION: In case the enum is not same in fwk and backend
6994  *              make sure the parameter is correctly propogated
6995  *
6996  * PARAMETERS  :
6997  *   @arr      : map between the two enums
6998  *   @len      : len of the map
6999  *   @fwk_name : name of the hal_parm to map
7000  *
7001  * RETURN     : int32_t type of status
7002  *              hal_name  -- success
7003  *              none-zero failure code
7004  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)7005 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
7006         size_t len, fwkType fwk_name)
7007 {
7008     for (size_t i = 0; i < len; i++) {
7009         if (arr[i].fwk_name == fwk_name) {
7010             return arr[i].hal_name;
7011         }
7012     }
7013 
7014     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
7015     return NAME_NOT_FOUND;
7016 }
7017 
7018 /*===========================================================================
7019  * FUNCTION   : lookupProp
7020  *
7021  * DESCRIPTION: lookup a value by its name
7022  *
7023  * PARAMETERS :
7024  *   @arr     : map between the two enums
7025  *   @len     : size of the map
7026  *   @name    : name to be looked up
7027  *
7028  * RETURN     : Value if found
7029  *              CAM_CDS_MODE_MAX if not found
7030  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)7031 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
7032         size_t len, const char *name)
7033 {
7034     if (name) {
7035         for (size_t i = 0; i < len; i++) {
7036             if (!strcmp(arr[i].desc, name)) {
7037                 return arr[i].val;
7038             }
7039         }
7040     }
7041     return CAM_CDS_MODE_MAX;
7042 }
7043 
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received from the HAL/backend into the
 *              camera_metadata_t format expected by the framework.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @pendingRequest: pending request for this metadata
 *   @pprocDone: whether internal offline postprocessing is done
 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                         in a batch. Always true for non-batch mode.
 *   @enableZsl: optional pointer to the ZSL enable flag (may be NULL)
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
7058 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,const PendingRequestInfo & pendingRequest,bool pprocDone,bool lastMetadataInBatch,const bool * enableZsl)7059 QCamera3HardwareInterface::translateFromHalMetadata(
7060                                  metadata_buffer_t *metadata,
7061                                  const PendingRequestInfo& pendingRequest,
7062                                  bool pprocDone,
7063                                  bool lastMetadataInBatch,
7064                                  const bool *enableZsl)
7065 {
7066     CameraMetadata camMetadata;
7067     camera_metadata_t *resultMetadata;
7068 
7069     if (!lastMetadataInBatch) {
7070         /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
7071          * Timestamp is needed because it's used for shutter notify calculation.
7072          * */
7073         camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
7074         resultMetadata = camMetadata.release();
7075         return resultMetadata;
7076     }
7077 
7078     if (pendingRequest.jpegMetadata.entryCount())
7079         camMetadata.append(pendingRequest.jpegMetadata);
7080 
7081     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
7082     camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
7083     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
7084     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
7085     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
7086     camMetadata.update(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE, &pendingRequest.motion_detection_enable, 1);
7087     if (mBatchSize == 0) {
7088         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
7089         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
7090     }
7091 
7092     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
7093     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
7094     if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
7095         // DevCamDebug metadata translateFromHalMetadata AF
7096         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
7097                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
7098             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
7099             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
7100         }
7101         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
7102                 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
7103             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
7104             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
7105         }
7106         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
7107                 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
7108             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
7109             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
7110         }
7111         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
7112                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
7113             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
7114             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
7115         }
7116         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
7117                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
7118             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
7119             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
7120         }
7121         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
7122                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
7123             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
7124                 *DevCamDebug_af_monitor_pdaf_target_pos;
7125             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7126                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
7127         }
7128         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
7129                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
7130             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
7131                 *DevCamDebug_af_monitor_pdaf_confidence;
7132             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7133                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
7134         }
7135         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
7136                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
7137             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
7138             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7139                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
7140         }
7141         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
7142                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
7143             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
7144                 *DevCamDebug_af_monitor_tof_target_pos;
7145             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7146                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
7147         }
7148         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
7149                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
7150             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
7151                 *DevCamDebug_af_monitor_tof_confidence;
7152             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7153                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
7154         }
7155         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
7156                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
7157             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
7158             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7159                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
7160         }
7161         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
7162                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
7163             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
7164             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7165                 &fwk_DevCamDebug_af_monitor_type_select, 1);
7166         }
7167         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
7168                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
7169             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
7170             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7171                 &fwk_DevCamDebug_af_monitor_refocus, 1);
7172         }
7173         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
7174                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
7175             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
7176             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7177                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
7178         }
7179         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
7180                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
7181             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
7182                 *DevCamDebug_af_search_pdaf_target_pos;
7183             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7184                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
7185         }
7186         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
7187                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
7188             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
7189             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7190                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
7191         }
7192         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
7193                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
7194             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
7195             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7196                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
7197         }
7198         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
7199                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
7200             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
7201             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7202                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
7203         }
7204         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
7205                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
7206             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
7207             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7208                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
7209         }
7210         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
7211                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
7212             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
7213                 *DevCamDebug_af_search_tof_target_pos;
7214             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7215                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
7216         }
7217         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
7218                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
7219             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
7220             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7221                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
7222         }
7223         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
7224                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
7225             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
7226             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7227                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
7228         }
7229         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
7230                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
7231             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
7232             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7233                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
7234         }
7235         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
7236                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
7237             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
7238             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7239                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
7240         }
7241         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
7242                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
7243             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
7244             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7245                 &fwk_DevCamDebug_af_search_type_select, 1);
7246         }
7247         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
7248                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
7249             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
7250             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7251                 &fwk_DevCamDebug_af_search_next_pos, 1);
7252         }
7253         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
7254                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
7255             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
7256             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7257                 &fwk_DevCamDebug_af_search_target_pos, 1);
7258         }
7259         // DevCamDebug metadata translateFromHalMetadata AEC
7260         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
7261                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
7262             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
7263             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
7264     }
7265         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
7266                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
7267             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
7268             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
7269         }
7270         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7271                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7272             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7273             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7274         }
7275         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7276                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7277             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7278             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7279         }
7280         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7281                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7282             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7283             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7284         }
7285         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7286                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7287             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7288             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7289         }
7290         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7291                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7292             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7293             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7294         }
7295         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7296                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7297             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7298             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7299         }
7300         // DevCamDebug metadata translateFromHalMetadata zzHDR
7301         IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7302                 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7303             float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7304             camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7305         }
7306         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7307                 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
7308             int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
7309             camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7310         }
7311         IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7312                 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7313             float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7314             camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7315         }
7316         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7317                 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
7318             int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
7319             camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7320         }
7321         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7322                 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7323             float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7324                 *DevCamDebug_aec_hdr_sensitivity_ratio;
7325             camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7326                                &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7327         }
7328         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7329                 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7330             float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7331             camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7332                                &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7333         }
7334         // DevCamDebug metadata translateFromHalMetadata ADRC
7335         IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7336                 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7337             float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7338             camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7339                                &fwk_DevCamDebug_aec_total_drc_gain, 1);
7340         }
7341         IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7342                 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7343             float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7344             camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7345                                &fwk_DevCamDebug_aec_color_drc_gain, 1);
7346         }
7347         IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7348                 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7349             float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7350             camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7351         }
7352         IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7353                 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7354             float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7355             camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7356         }
        // DevCamDebug vendor-tag mirror: each entry below copies one HAL
        // debug value straight into the matching framework result tag when
        // the HAL published it for this frame.
        IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
                CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
            float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
            camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
                CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
            float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
            camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AEC MOTION
        IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
                CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
            float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
            camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
                               &fwk_DevCamDebug_aec_camera_motion_dx, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
                CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
            float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
            camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
                               &fwk_DevCamDebug_aec_camera_motion_dy, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
                CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
            float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
            camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
                               &fwk_DevCamDebug_aec_subject_motion, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AWB
        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
        }
        // AWB correlated color temperature (integer Kelvin value from HAL).
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
        }
7412     }
7413     // atrace_end(ATRACE_TAG_ALWAYS);
7414 
    // Widen the HAL's 32-bit frame number to the int64 the framework
    // expects for ANDROID_SYNC_FRAME_NUMBER.
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        // Framework crop region is [left, top, width, height].
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3], pendingRequest.zoomRatio);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }

    // Zoom ratio is always reported for this request (not gated on HAL
    // metadata availability).
    camMetadata.update(ANDROID_CONTROL_ZOOM_RATIO, &pendingRequest.zoomRatio, 1);

    // HAL reports FPS as floats; the framework AE target FPS range is
    // int32, so min/max are truncated here.
    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }
7447 
    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }

    // Map the HAL bestshot (scene) mode to the framework scene-mode enum;
    // unknown values are dropped silently.
    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP),
                *sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkSceneMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
                     fwkSceneMode);
        }
    }

    // AE/AWB lock flags: HAL uses uint32, framework tag is a uint8 enum.
    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }

    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    // Flash state: negative HAL values are treated as "not reported"; when
    // the camera has no flash hardware the state is forced to UNAVAILABLE.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("EIS result default to OFF mode");
    }

    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }
7549 
    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        // Reorder the HAL black-level quad to match this sensor's CFA
        // (color filter arrangement) before publishing.
        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

#ifndef USE_HAL_3_3
        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 14 bit depth to sensor 10 bit sensor raw
        // depth space.
        fwk_blackLevelInd[0] /= 16.0;
        fwk_blackLevelInd[1] /= 16.0;
        fwk_blackLevelInd[2] /= 16.0;
        fwk_blackLevelInd[3] /= 16.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
#endif
    }

#ifndef USE_HAL_3_3
    // Fixed whitelevel is used by ISP/Sensor
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);
#endif

    // Cache the latest EIS crop info and remap its deltas into the active
    // array coordinate system.
    IF_META_AVAILABLE(cam_eis_crop_info_t, eisCropInfo,
            CAM_INTF_META_EIS_CROP_INFO, metadata) {
        mLastEISCropInfo = *eisCropInfo;

        //mLastEISCropInfo contains combined zoom_ratio, so map with 1.0 here.
        mCropRegionMapper.toActiveArray(mLastEISCropInfo.delta_x, mLastEISCropInfo.delta_y,
                mLastEISCropInfo.delta_width, mLastEISCropInfo.delta_height, 1.0f/*zoom_ratio*/);
    }
7596 
    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
        LOGD("expTimeBoost = %f", *expTimeBoost);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // Interleave (S, O) once per color channel: [S,O, S,O, ...].
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }

#ifndef USE_HAL_3_3
    // Post-raw sensitivity boost: start from the ISP sensitivity (default
    // 100 when not reported) and scale by the post-stats factor if present.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
#endif

    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }
7653 
    // Face-detection results: translate the HAL face list into framework
    // face statistics. Rectangles are published in SIMPLE and FULL modes;
    // ids/landmarks only in FULL mode; blink/smile/gaze go to vendor tags.
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp the reported face count to the arrays' capacity.
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes into faceRectangles (4 ints per face),
                    // k indexes into faceLandmarks (6 ints per face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height, pendingRequest.zoomRatio);

                        convertToRegions(rect, faceRectangles+j, -1);

                        LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
                                "bottom-right (%d, %d)",
                                faceDetectionInfo->frame_id, i,
                                faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
                                faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);

                        j+= 4;
                    }
                    // With no faces, zero the arrays so the (count-0)
                    // updates below publish well-defined data.
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.left_eye_center.x,
                                        face_landmarks.left_eye_center.y,
                                        pendingRequest.zoomRatio);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.right_eye_center.x,
                                        face_landmarks.right_eye_center.y,
                                        pendingRequest.zoomRatio);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.mouth_center.x,
                                        face_landmarks.mouth_center.y,
                                        pendingRequest.zoomRatio);

                                convertLandmarks(face_landmarks, faceLandmarks+k);

                                LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
                                        "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
                                        faceDetectionInfo->frame_id, i,
                                        faceLandmarks[k + LEFT_EYE_X],
                                        faceLandmarks[k + LEFT_EYE_Y],
                                        faceLandmarks[k + RIGHT_EYE_X],
                                        faceLandmarks[k + RIGHT_EYE_Y],
                                        faceLandmarks[k + MOUTH_X],
                                        faceLandmarks[k + MOUTH_Y]);

                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        } else {
                            // No landmark metadata this frame: publish
                            // invalid landmark markers for each face.
                            for (size_t i = 0; i < numFaces; i++) {
                                setInvalidLandmarks(faceLandmarks+k);
                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        }

                        for (size_t i = 0; i < numFaces; i++) {
                            faceIds[i] = faceDetectionInfo->faces[i].face_id;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
                                    faceDetectionInfo->frame_id, i, faceIds[i]);
                        }

                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                    }
                    // Vendor-tag blink statistics: per-face detected flag
                    // plus left/right blink degree pairs.
                    IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
                            CAM_INTF_META_FACE_BLINK, metadata) {
                        uint8_t detected[MAX_ROI];
                        uint8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            detected[i] = blinks->blink[i].blink_detected;
                            degree[2 * i] = blinks->blink[i].left_blink;
                            degree[2 * i + 1] = blinks->blink[i].right_blink;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "blink_detected=%d, leye_blink=%d, reye_blink=%d",
                                    faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
                                    degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
                                detected, numFaces);
                        camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
                                degree, numFaces * 2);
                    }
                    // Vendor-tag smile statistics: per-face degree and
                    // confidence.
                    IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
                            CAM_INTF_META_FACE_SMILE, metadata) {
                        uint8_t degree[MAX_ROI];
                        uint8_t confidence[MAX_ROI];
                        for (size_t i = 0; i < numFaces; i++) {
                            degree[i] = smiles->smile[i].smile_degree;
                            confidence[i] = smiles->smile[i].smile_confidence;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "smile_degree=%d, smile_score=%d",
                                    faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
                        }
                        camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
                                degree, numFaces);
                        camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
                                confidence, numFaces);
                    }
                    // Vendor-tag gaze statistics: angle, 3-axis direction,
                    // and left-right/top-bottom gaze degree pairs.
                    IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
                            CAM_INTF_META_FACE_GAZE, metadata) {
                        int8_t angle[MAX_ROI];
                        int32_t direction[MAX_ROI * 3];
                        int8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            angle[i] = gazes->gaze[i].gaze_angle;
                            direction[3 * i] = gazes->gaze[i].updown_dir;
                            direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
                            direction[3 * i + 2] = gazes->gaze[i].roll_dir;
                            degree[2 * i] = gazes->gaze[i].left_right_gaze;
                            degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
                                    "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
                                    "left_right_gaze=%d, top_bottom_gaze=%d",
                                    faceDetectionInfo->frame_id, i, angle[i],
                                    direction[3 * i], direction[3 * i + 1],
                                    direction[3 * i + 2],
                                    degree[2 * i], degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
                                (uint8_t *)angle, numFaces);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
                                direction, numFaces * 3);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
                                (uint8_t *)degree, numFaces * 2);
                    }
                }
            }
        }
    }
7823 
    // Histogram: publish mode and bin count, then (when enabled and bins
    // are known) select the channel buffer to expose.
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        int32_t histogramBins = 0;
        camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);

        IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
            histogramBins = *histBins;
            camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
        }

        if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
            IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
                // process histogram statistics info
                // NOTE(review): histogramData stays NULL if stats_data->type
                // is neither BAYER nor YUV; update() would then be handed a
                // null pointer — presumably the HAL only emits those two
                // types. Verify against the HAL contract.
                int32_t* histogramData = NULL;

                switch (stats_data->type) {
                case CAM_HISTOGRAM_TYPE_BAYER:
                    // Pick one bayer channel; Y/ALL/R (and anything else)
                    // fall through to the R-channel buffer.
                    switch (stats_data->bayer_stats.data_type) {
                        case CAM_STATS_CHANNEL_GR:
                          histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_GB:
                          histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_B:
                          histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_Y:
                        case CAM_STATS_CHANNEL_ALL:
                        case CAM_STATS_CHANNEL_R:
                        default:
                          histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
                          break;
                    }
                    break;
                case CAM_HISTOGRAM_TYPE_YUV:
                    histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
                    break;
                }

                camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
            }
        }
    }
7869 
    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: clamp the advertised map dimensions to the HAL
    // maximums; 4 gain floats (one per bayer channel) per grid cell.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        // Clamp (in place) an out-of-range point count before publishing;
        // each point is an (in, out) pair, hence the count * 2 below.
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAIN_MAX);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }
7932 
    // Sensor profile tone curve: same clamp-then-publish pattern as the
    // RGB tonemap curves above (2 floats per point).
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }

    // OTP-calibrated Gr/Gb white-balance split.
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Effect mode: map the HAL effect enum to the framework enum; unknown
    // values are dropped.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }
7981 
7982     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7983             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7984         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7985                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7986         if (NAME_NOT_FOUND != fwk_testPatternMode) {
7987             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7988         }
7989         int32_t fwk_testPatternData[4];
7990         fwk_testPatternData[0] = testPatternData->r;
7991         fwk_testPatternData[3] = testPatternData->b;
7992         switch (gCamCapability[mCameraId]->color_arrangement) {
7993         case CAM_FILTER_ARRANGEMENT_RGGB:
7994         case CAM_FILTER_ARRANGEMENT_GRBG:
7995             fwk_testPatternData[1] = testPatternData->gr;
7996             fwk_testPatternData[2] = testPatternData->gb;
7997             break;
7998         case CAM_FILTER_ARRANGEMENT_GBRG:
7999         case CAM_FILTER_ARRANGEMENT_BGGR:
8000             fwk_testPatternData[2] = testPatternData->gr;
8001             fwk_testPatternData[1] = testPatternData->gb;
8002             break;
8003         default:
8004             LOGE("color arrangement %d is not supported",
8005                 gCamCapability[mCameraId]->color_arrangement);
8006             break;
8007         }
8008         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
8009     }
8010 
    // --- JPEG / EXIF settings echoed back to the framework result ---

    // GPS coordinates are always a (latitude, longitude, altitude) triple.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    // gps_methods is a NUL-terminated byte string in HAL metadata; wrap it in
    // a String8 for the string-typed framework tag.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    // Quality values are byte-sized in the framework; narrowing is intended.
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    // Skip reprocess metadata if there is no input stream.
    if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
        IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
            camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
                    privateData,
                    MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
        }
    }

    // Vendor tag: AEC metering (exposure meter) mode.
    IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
        camMetadata.update(QCAMERA3_EXPOSURE_METER,
                meteringMode, 1);
    }

    // Vendor tags: auto-scene-detect HDR classification and its confidence.
    IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
            CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
        LOGD("hdr_scene_data: %d %f\n",
                hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
        uint8_t isHdr = hdr_scene_data->is_hdr_scene;
        float isHdrConfidence = hdr_scene_data->hdr_confidence;
        camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
                           &isHdr, 1);
        camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
                           &isHdrConfidence, 1);
    }
8071 
8072 
    // Serialize the tuning parameters into a flat blob published via the
    // QCAMERA3_TUNING_META_DATA_BLOB vendor tag. Layout (all header fields
    // uint32_t, in this order): version, sensor size, VFE size, CPP size,
    // CAC size, mod3 size (forced to 0), followed by the variable-length
    // sensor / VFE / CPP / CAC data segments. The statement order below IS
    // the wire format — do not reorder.
    if (metadata->is_tuning_params_valid) {
        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;


        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
                sizeof(uint32_t));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
                sizeof(uint32_t));
        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
                sizeof(uint32_t));
        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // mod3 data is not carried through this blob; its size is zeroed
        // before being written so readers skip the segment.
        metadata->tuning_params.tuning_mod3_data_size = 0;
        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
                sizeof(uint32_t));
        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // Each segment is clamped to its compile-time maximum so a bogus size
        // from the HAL cannot overrun the source buffer.
        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
                TUNING_SENSOR_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
                TUNING_VFE_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
                TUNING_CPP_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cac_data_size,
                TUNING_CAC_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
                count);
        data += count;

        // The blob tag is int32-typed, so the byte length is converted to a
        // count of 32-bit words. NOTE(review): a total length that is not a
        // multiple of 4 would be truncated here — presumably the segment
        // sizes keep it aligned; confirm.
        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
                (int32_t *)(void *)tuning_meta_data_blob,
                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
    }
8137 
    // Neutral color point, reported as rationals.
    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
                NEUTRAL_COL_POINTS);
    }

    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
    }

    // AE regions: translate the HAL ROI rectangle into the framework's
    // (xmin, ymin, xmax, ymax, weight) tuple via convertToRegions().
    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
        int32_t aeRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        cam_rect_t hAeRect = hAeRegions->rect;
        mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
                hAeRect.width, hAeRect.height, pendingRequest.zoomRatio);

        convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
                hAeRect.left, hAeRect.top, hAeRect.width,
                hAeRect.height);
    }
8166 
    // AF state: a state cached on the pending request takes priority over the
    // state in the current metadata buffer; nothing is sent if the state was
    // already reported for this request.
    if (!pendingRequest.focusStateSent) {
        if (pendingRequest.focusStateValid) {
            camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
            LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
        } else {
            IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
                uint8_t fwk_afState = (uint8_t) *afState;
                camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
                LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
            }
        }
    }

    // Focus distance must always be present in the result: fall back to the
    // last known value (mLastFocusDistance) when the HAL omits it, and cache
    // the new value whenever it is present.
    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
        mLastFocusDistance = *focusDistance;
    } else {
        LOGE("Missing LENS_FOCUS_DISTANCE metadata. Use last known distance of %f",
                mLastFocusDistance);
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , &mLastFocusDistance, 1);
    }

    // Focus range is a (near, far) pair.
    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
        uint8_t fwk_lensState = *lensState;
        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
    }
8197 
    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
        uint32_t ab_mode = *hal_ab_mode;
        // The framework only knows plain AUTO; collapse the HAL's
        // frequency-specific auto modes before the table lookup.
        if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
                ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
              ab_mode = CAM_ANTIBANDING_MODE_AUTO;
        }
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                ab_mode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_ab_mode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
        }
    }

    // Bestshot (scene) mode: only reported when it maps to a framework enum.
    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkBestshotMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
        } else {
            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
        }
    }

    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
         uint8_t fwk_mode = (uint8_t) *mode;
         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
    }

    /* Constant metadata values to be update*/

    // Hot pixel map reporting is unconditionally OFF for this HAL.
    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    // The map itself is published with count 0 (an empty entry), so the
    // uninitialized array contents are never read by the framework.
    int32_t hotPixelMap[2];
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
8236 
    // CDS (chroma downsampling) mode, vendor tag.
    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
    }

    // Staggered video HDR: collapse all non-OFF sensor HDR types to ON, and
    // track toggles in mCurrFeatureState for profiling logs.
    IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
        int32_t fwk_hdr;
        int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
        if(*vhdr == CAM_SENSOR_HDR_OFF) {
            fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
        } else {
            fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
        }

        // NOTE(review): comparing fwk_hdr (mode enum) with curr_hdr_state
        // (0/1 flag) — presumably OFF==0 and ON==1; confirm the enum values.
        if(fwk_hdr != curr_hdr_state) {
           LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
           if(fwk_hdr)
              mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
           else
              mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
    }

    //binning correction
    IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
            CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
        int32_t fwk_bin_mode = (int32_t) *bin_correction;
        camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
    }

    // IR mode: same toggle-tracking pattern as video HDR above.
    IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
        int32_t fwk_ir = (int32_t) *ir;
        int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
        int8_t is_ir_on = 0;

        (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
        if(is_ir_on != curr_ir_state) {
           LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
           if(is_ir_on)
              mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
           else
              mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
        }
        camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
    }

    // AEC SPEED
    IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
        camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
    }

    // AWB SPEED
    IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
        camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
    }

    // TNR (temporal noise reduction): publish enable flag + plate processing
    // type, tracking toggles in mCurrFeatureState.
    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
        uint8_t tnr_enable       = tnr->denoise_enable;
        int32_t tnr_process_type = (int32_t)tnr->process_plates;
        int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
        int8_t is_tnr_on = 0;

        (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
        if(is_tnr_on != curr_tnr_state) {
           LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
           if(is_tnr_on)
              mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
           else
              mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
        }

        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
    }
8313 
    // Reprocess crop data
    // Finds the crop entry belonging to the reprocessible output stream and
    // publishes it (plus its ROI map) through vendor tags. Only the first
    // matching stream is reported (note the break below).
    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
        uint8_t cnt = crop_data->num_of_streams;
        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
            // mm-qcamera-daemon only posts crop_data for streams
            // not linked to pproc. So no valid crop metadata is not
            // necessarily an error case.
            LOGD("No valid crop metadata entries");
        } else {
            uint32_t reproc_stream_id;
            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
                LOGD("No reprocessible stream found, ignore crop data");
            } else {
                int rc = NO_ERROR;
                Vector<int32_t> roi_map;
                // NOTE(review): plain operator new[] throws on failure and
                // never returns NULL, so this NO_MEMORY path is dead code;
                // use new(std::nothrow) if the graceful path is intended.
                int32_t *crop = new int32_t[cnt*4];
                if (NULL == crop) {
                   rc = NO_MEMORY;
                }
                if (NO_ERROR == rc) {
                    int32_t streams_found = 0;
                    for (size_t i = 0; i < cnt; i++) {
                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
                            if (pprocDone) {
                                // HAL already does internal reprocessing,
                                // either via reprocessing before JPEG encoding,
                                // or offline postprocessing for pproc bypass case.
                                crop[0] = 0;
                                crop[1] = 0;
                                crop[2] = mInputStreamInfo.dim.width;
                                crop[3] = mInputStreamInfo.dim.height;
                            } else {
                                crop[0] = crop_data->crop_info[i].crop.left;
                                crop[1] = crop_data->crop_info[i].crop.top;
                                crop[2] = crop_data->crop_info[i].crop.width;
                                crop[3] = crop_data->crop_info[i].crop.height;
                            }
                            roi_map.add(crop_data->crop_info[i].roi_map.left);
                            roi_map.add(crop_data->crop_info[i].roi_map.top);
                            roi_map.add(crop_data->crop_info[i].roi_map.width);
                            roi_map.add(crop_data->crop_info[i].roi_map.height);
                            streams_found++;
                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
                                    crop[0], crop[1], crop[2], crop[3]);
                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
                                    crop_data->crop_info[i].roi_map.left,
                                    crop_data->crop_info[i].roi_map.top,
                                    crop_data->crop_info[i].roi_map.width,
                                    crop_data->crop_info[i].roi_map.height);
                            break;

                       }
                    }
                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
                            &streams_found, 1);
                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
                            crop, (size_t)(streams_found * 4));
                    if (roi_map.array()) {
                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
                                roi_map.array(), roi_map.size());
                    }
               }
               if (crop) {
                   delete [] crop;
               }
            }
        }
    }
8382 
    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
        // so hardcoding the CAC result to OFF mode.
        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
    } else {
        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    *cacMode);
            if (NAME_NOT_FOUND != val) {
                uint8_t resultCacMode = (uint8_t)val;
                // check whether CAC result from CB is equal to Framework set CAC mode
                // If not equal then set the CAC mode came in corresponding request
                if (pendingRequest.fwkCacMode != resultCacMode) {
                    resultCacMode = pendingRequest.fwkCacMode;
                }
                //Check if CAC is disabled by property
                if (m_cacModeDisabled) {
                    resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
                }

                LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
            } else {
                LOGE("Invalid CAC camera parameter: %d", *cacMode);
            }
        }
    }

    // Post blob of cam_cds_data through vendor tag.
    // The blob always advertises exactly one stream: the CDS enable of the
    // reprocessible stream (if found); otherwise a zeroed entry that still
    // carries the session-level enable flag.
    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
        uint8_t cnt = cdsInfo->num_of_streams;
        cam_cds_data_t cdsDataOverride;
        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
        cdsDataOverride.num_of_streams = 1;
        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
            uint32_t reproc_stream_id;
            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
                LOGD("No reprocessible stream found, ignore cds data");
            } else {
                for (size_t i = 0; i < cnt; i++) {
                    if (cdsInfo->cds_info[i].stream_id ==
                            reproc_stream_id) {
                        cdsDataOverride.cds_info[0].cds_enable =
                                cdsInfo->cds_info[i].cds_enable;
                        break;
                    }
                }
            }
        } else {
            LOGD("Invalid stream count %d in CDS_DATA", cnt);
        }
        camMetadata.update(QCAMERA3_CDS_INFO,
                (uint8_t *)&cdsDataOverride,
                sizeof(cam_cds_data_t));
    }
8440 
    // Ldaf calibration data
    // Laser-assisted-AF calibration is cached once per session: after the
    // first successful read, mLdafCalibExist suppresses further lookups.
    if (!mLdafCalibExist) {
        IF_META_AVAILABLE(uint32_t, ldafCalib,
                CAM_INTF_META_LDAF_EXIF, metadata) {
            mLdafCalibExist = true;
            mLdafCalib[0] = ldafCalib[0];
            mLdafCalib[1] = ldafCalib[1];
            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
                    ldafCalib[0], ldafCalib[1]);
        }
    }

    // EXIF debug data through vendor tag
    /*
     * Mobicat Mask can assume 3 values:
     * 1 refers to Mobicat data,
     * 2 refers to Stats Debug and Exif Debug Data
     * 3 refers to Mobicat and Stats Debug Data
     * We want to make sure that we are sending Exif debug data
     * only when Mobicat Mask is 2.
     */
    if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
        camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
                (uint8_t *)(void *)mExifParams.debug_params,
                sizeof(mm_jpeg_debug_exif_params_t));
    }

    // Reprocess and DDM debug data through vendor tag
    // repro_info starts zeroed; each field below is filled only when the
    // corresponding HAL metadata entry is present, then the whole struct is
    // posted as one blob.
    cam_reprocess_info_t repro_info;
    memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
    IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
            CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
        memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
    }
    IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
            CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
        memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
    }
    IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
            CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
        memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
    }
    IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
            CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
        memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
    }
    IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
            CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
        memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
    }
    IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
        memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
    }
    IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
            CAM_INTF_PARM_ROTATION, metadata) {
        memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
    }
    IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
        memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
    }
    IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
        memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
    }
    camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
        (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
8506 
    // INSTANT AEC MODE
    IF_META_AVAILABLE(uint8_t, instant_aec_mode,
            CAM_INTF_PARM_INSTANT_AEC, metadata) {
        camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
    }

    // AF scene change
    // Published to both the experimental vendor tag and the public tag;
    // defaults to 0 (no change) when the HAL omits the entry, since the
    // framework expects the tag in every result.
    IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
        camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
    } else {
        uint8_t noSceneChange = 0;
        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, &noSceneChange, 1);
        camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, &noSceneChange, 1);
        LOGE("Missing AF_SCENE_CHANGE metadata!");
    }

    // Enable ZSL
    if (enableZsl != nullptr) {
        uint8_t value = *enableZsl ?
                ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
        camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
    }

    // Echo the OIS data mode the framework asked for in the request.
    camMetadata.update(ANDROID_STATISTICS_OIS_DATA_MODE, &pendingRequest.requestedOisDataMode, 1);

    // OIS Data
    // The vendor (NEXUS_EXPERIMENTAL) tags always carry the raw samples; the
    // public ANDROID_STATISTICS_OIS_* tags carry them only when OIS data mode
    // is ON, with timestamps rebased from SOF boottime to the request
    // timestamp domain.
    IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
            &(frame_ois_data->frame_sof_timestamp_boottime), 1);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
            frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
            frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
            frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);

        if (pendingRequest.requestedOisDataMode == ANDROID_STATISTICS_OIS_DATA_MODE_ON) {
            // Offset each OIS sample timestamp by the delta between the
            // request timestamp and the frame's start-of-frame time.
            int64_t timeDiff = pendingRequest.timestamp -
                    frame_ois_data->frame_sof_timestamp_boottime;

            std::vector<int64_t> oisTimestamps;

            for (int32_t i = 0; i < frame_ois_data->num_ois_sample; i++) {
                oisTimestamps.push_back(
                        frame_ois_data->ois_sample_timestamp_boottime[i] + timeDiff);
            }

            camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
                    oisTimestamps.data(), frame_ois_data->num_ois_sample);
            camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
                    frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
            camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
                    frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
        } else {
            // If OIS data mode is OFF, add NULL for OIS keys.
            camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
                    frame_ois_data->ois_sample_timestamp_boottime, 0);
            camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
                    frame_ois_data->ois_sample_shift_pixel_x, 0);
            camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
                    frame_ois_data->ois_sample_shift_pixel_y, 0);
        }
    }
8571 
    // DevCamDebug metadata translateFromHalMetadata AEC MOTION
    // AEC motion estimates (camera dx/dy and subject motion) exposed through
    // experimental vendor tags for debugging.
    IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
            CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
        float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
                           &fwk_DevCamDebug_aec_camera_motion_dx, 1);
    }
    IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
            CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
        float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
                           &fwk_DevCamDebug_aec_camera_motion_dy, 1);
    }
    IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
            CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
        float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
                           &fwk_DevCamDebug_aec_subject_motion, 1);
    }
8591 
8592     // Camera lens calibration dynamic fields, for back camera. Same values as for static metadata.
8593     if (mCameraId == 0) {
8594         const camera_metadata_t *staticInfo = gStaticMetadata[mCameraId];
8595         camera_metadata_ro_entry_t rotation, translation, intrinsics, distortion, reference;
8596         int res;
8597         bool fail = false;
8598         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_ROTATION,
8599                 &rotation);
8600         if (res != 0) {
8601             fail = true;
8602         }
8603         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_TRANSLATION,
8604                 &translation);
8605         if (res != 0) {
8606             fail = true;
8607         }
8608         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_INTRINSIC_CALIBRATION,
8609                 &intrinsics);
8610         if (res != 0) {
8611             fail = true;
8612         }
8613         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_DISTORTION,
8614                 &distortion);
8615         if (res != 0) {
8616             fail = true;
8617         }
8618         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_REFERENCE,
8619                 &reference);
8620         if (res != 0) {
8621             fail = true;
8622         }
8623 
8624         if (!fail) {
8625             camMetadata.update(ANDROID_LENS_POSE_ROTATION,
8626                     rotation.data.f, rotation.count);
8627             camMetadata.update(ANDROID_LENS_POSE_TRANSLATION,
8628                     translation.data.f, translation.count);
8629             camMetadata.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
8630                     intrinsics.data.f, intrinsics.count);
8631             camMetadata.update(ANDROID_LENS_DISTORTION,
8632                     distortion.data.f, distortion.count);
8633             camMetadata.update(ANDROID_LENS_POSE_REFERENCE,
8634                     reference.data.u8, reference.count);
8635         }
8636     }
8637 
8638     resultMetadata = camMetadata.release();
8639     return resultMetadata;
8640 }
8641 
8642 /*===========================================================================
8643  * FUNCTION   : saveExifParams
8644  *
8645  * DESCRIPTION:
8646  *
8647  * PARAMETERS :
8648  *   @metadata : metadata information from callback
8649  *
8650  * RETURN     : none
8651  *
8652  *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)8653 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8654 {
8655     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8656             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8657         if (mExifParams.debug_params) {
8658             mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8659             mExifParams.debug_params->ae_debug_params_valid = TRUE;
8660         }
8661     }
8662     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8663             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8664         if (mExifParams.debug_params) {
8665             mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8666             mExifParams.debug_params->awb_debug_params_valid = TRUE;
8667         }
8668     }
8669     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8670             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8671         if (mExifParams.debug_params) {
8672             mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8673             mExifParams.debug_params->af_debug_params_valid = TRUE;
8674         }
8675     }
8676     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8677             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8678         if (mExifParams.debug_params) {
8679             mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8680             mExifParams.debug_params->asd_debug_params_valid = TRUE;
8681         }
8682     }
8683     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8684             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8685         if (mExifParams.debug_params) {
8686             mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8687             mExifParams.debug_params->stats_debug_params_valid = TRUE;
8688         }
8689     }
8690     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8691             CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8692         if (mExifParams.debug_params) {
8693             mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8694             mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8695         }
8696     }
8697     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8698             CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8699         if (mExifParams.debug_params) {
8700             mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8701             mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8702         }
8703     }
8704     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8705             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8706         if (mExifParams.debug_params) {
8707             mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8708             mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8709         }
8710     }
8711 }
8712 
8713 /*===========================================================================
8714  * FUNCTION   : get3AExifParams
8715  *
8716  * DESCRIPTION:
8717  *
8718  * PARAMETERS : none
8719  *
8720  *
8721  * RETURN     : mm_jpeg_exif_params_t
8722  *
8723  *==========================================================================*/
get3AExifParams()8724 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8725 {
8726     return mExifParams;
8727 }
8728 
8729 /*===========================================================================
8730  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
8731  *
8732  * DESCRIPTION:
8733  *
8734  * PARAMETERS :
8735  *   @metadata : metadata information from callback
8736  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8737  *                               urgent metadata in a batch. Always true for
8738  *                               non-batch mode.
8739  *   @requestIter:         Pending request iterator
8740  *   @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8741  *                         i.e. even though it doesn't map to a valid partial
8742  *                         frame number, its metadata entries should be kept.
8743  * RETURN     : camera_metadata_t*
8744  *              metadata in a format specified by fwk
8745  *==========================================================================*/
8746 camera_metadata_t*
translateCbUrgentMetadataToResultMetadata(metadata_buffer_t * metadata,bool lastUrgentMetadataInBatch,const pendingRequestIterator requestIter,bool isJumpstartMetadata)8747 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
8748                                 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
8749                                  const pendingRequestIterator requestIter, bool isJumpstartMetadata)
8750 {
8751     CameraMetadata camMetadata;
8752     camera_metadata_t *resultMetadata;
8753     uint32_t frame_number = requestIter->frame_number;
8754 
8755     if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
8756         /* In batch mode, use empty metadata if this is not the last in batch
8757          */
8758         resultMetadata = allocate_camera_metadata(0, 0);
8759         return resultMetadata;
8760     }
8761 
8762     IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8763         uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8764         camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8765         LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8766     }
8767 
8768     IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8769         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8770                 &aecTrigger->trigger, 1);
8771         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8772                 &aecTrigger->trigger_id, 1);
8773         LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8774                  aecTrigger->trigger);
8775         LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8776                 aecTrigger->trigger_id);
8777     }
8778 
8779     IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8780         uint8_t fwk_ae_state = (uint8_t) *ae_state;
8781         camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8782         LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8783     }
8784 
8785     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8786         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8787         if (NAME_NOT_FOUND != val) {
8788             uint8_t fwkAfMode = (uint8_t)val;
8789             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8790             LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8791         } else {
8792             LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8793                     val);
8794         }
8795     }
8796 
8797     IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8798         LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8799             af_trigger->trigger);
8800         LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8801             af_trigger->trigger_id);
8802 
8803         IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8804             mAfTrigger = *af_trigger;
8805             uint32_t fwk_AfState = (uint32_t) *afState;
8806 
8807             // If this is the result for a new trigger, check if there is new early
8808             // af state. If there is, use the last af state for all results
8809             // preceding current partial frame number.
8810             for (auto & pendingRequest : mPendingRequestsList) {
8811                 if (pendingRequest.frame_number < frame_number) {
8812                     pendingRequest.focusStateValid = true;
8813                     pendingRequest.focusState = fwk_AfState;
8814                 } else if (pendingRequest.frame_number == frame_number) {
8815                     IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8816                         // Check if early AF state for trigger exists. If yes, send AF state as
8817                         // partial result for better latency.
8818                         uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8819                         pendingRequest.focusStateSent = true;
8820                         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8821                         LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8822                                  frame_number, fwkEarlyAfState);
8823                     }
8824                 }
8825             }
8826         }
8827     }
8828     camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8829         &mAfTrigger.trigger, 1);
8830     camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8831 
8832     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8833         /*af regions*/
8834         cam_rect_t hAfRect = hAfRegions->rect;
8835         int32_t afRegions[REGIONS_TUPLE_COUNT];
8836         // Adjust crop region from sensor output coordinate system to active
8837         // array coordinate system.
8838         mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8839                 hAfRect.width, hAfRect.height, requestIter->zoomRatio);
8840 
8841         convertToRegions(hAfRect, afRegions, hAfRegions->weight);
8842         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8843                 REGIONS_TUPLE_COUNT);
8844         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8845                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8846                 hAfRect.left, hAfRect.top, hAfRect.width,
8847                 hAfRect.height);
8848     }
8849 
8850     // AF region confidence
8851     IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8852         camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8853     }
8854 
8855     IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8856         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8857                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8858         if (NAME_NOT_FOUND != val) {
8859             uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8860             camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8861             LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8862         } else {
8863             LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8864         }
8865     }
8866 
8867     uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8868     uint32_t aeMode = CAM_AE_MODE_MAX;
8869     int32_t flashMode = CAM_FLASH_MODE_MAX;
8870     int32_t redeye = -1;
8871     IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8872         aeMode = *pAeMode;
8873     }
8874     IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8875         flashMode = *pFlashMode;
8876     }
8877     IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8878         redeye = *pRedeye;
8879     }
8880 
8881     if (1 == redeye) {
8882         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8883         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8884     } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8885         int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8886                 flashMode);
8887         if (NAME_NOT_FOUND != val) {
8888             fwk_aeMode = (uint8_t)val;
8889             camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8890         } else {
8891             LOGE("Unsupported flash mode %d", flashMode);
8892         }
8893     } else if (aeMode == CAM_AE_MODE_ON) {
8894         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8895         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8896     } else if (aeMode == CAM_AE_MODE_OFF) {
8897         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8898         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8899     } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8900         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
8901         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8902     } else {
8903         LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8904               "flashMode:%d, aeMode:%u!!!",
8905                  redeye, flashMode, aeMode);
8906     }
8907     if (mInstantAEC) {
8908         // Increment frame Idx count untill a bound reached for instant AEC.
8909         mInstantAecFrameIdxCount++;
8910         IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8911                 CAM_INTF_META_AEC_INFO, metadata) {
8912             LOGH("ae_params->settled = %d",ae_params->settled);
8913             // If AEC settled, or if number of frames reached bound value,
8914             // should reset instant AEC.
8915             if (ae_params->settled ||
8916                     (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8917                 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8918                 mInstantAEC = false;
8919                 mResetInstantAEC = true;
8920                 mInstantAecFrameIdxCount = 0;
8921             }
8922         }
8923     }
8924 
8925     IF_META_AVAILABLE(int32_t, af_tof_confidence,
8926             CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8927         IF_META_AVAILABLE(int32_t, af_tof_distance,
8928                 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8929             int32_t fwk_af_tof_confidence = *af_tof_confidence;
8930             int32_t fwk_af_tof_distance = *af_tof_distance;
8931             if (fwk_af_tof_confidence == 1) {
8932                 mSceneDistance = fwk_af_tof_distance;
8933             } else {
8934                 mSceneDistance = -1;
8935             }
8936             LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8937                      fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8938         }
8939     }
8940     camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8941 
8942     resultMetadata = camMetadata.release();
8943     return resultMetadata;
8944 }
8945 
8946 /*===========================================================================
8947  * FUNCTION   : dumpMetadataToFile
8948  *
8949  * DESCRIPTION: Dumps tuning metadata to file system
8950  *
8951  * PARAMETERS :
8952  *   @meta           : tuning metadata
8953  *   @dumpFrameCount : current dump frame count
8954  *   @enabled        : Enable mask
8955  *
8956  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)8957 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8958                                                    uint32_t &dumpFrameCount,
8959                                                    bool enabled,
8960                                                    const char *type,
8961                                                    uint32_t frameNumber)
8962 {
8963     //Some sanity checks
8964     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8965         LOGE("Tuning sensor data size bigger than expected %d: %d",
8966               meta.tuning_sensor_data_size,
8967               TUNING_SENSOR_DATA_MAX);
8968         return;
8969     }
8970 
8971     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8972         LOGE("Tuning VFE data size bigger than expected %d: %d",
8973               meta.tuning_vfe_data_size,
8974               TUNING_VFE_DATA_MAX);
8975         return;
8976     }
8977 
8978     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8979         LOGE("Tuning CPP data size bigger than expected %d: %d",
8980               meta.tuning_cpp_data_size,
8981               TUNING_CPP_DATA_MAX);
8982         return;
8983     }
8984 
8985     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8986         LOGE("Tuning CAC data size bigger than expected %d: %d",
8987               meta.tuning_cac_data_size,
8988               TUNING_CAC_DATA_MAX);
8989         return;
8990     }
8991     //
8992 
8993     if(enabled){
8994         char timeBuf[FILENAME_MAX];
8995         char buf[FILENAME_MAX];
8996         memset(buf, 0, sizeof(buf));
8997         memset(timeBuf, 0, sizeof(timeBuf));
8998         time_t current_time;
8999         struct tm * timeinfo;
9000         time (&current_time);
9001         timeinfo = localtime (&current_time);
9002         if (timeinfo != NULL) {
9003             strftime (timeBuf, sizeof(timeBuf),
9004                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
9005         }
9006         String8 filePath(timeBuf);
9007         snprintf(buf,
9008                 sizeof(buf),
9009                 "%dm_%s_%d.bin",
9010                 dumpFrameCount,
9011                 type,
9012                 frameNumber);
9013         filePath.append(buf);
9014         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
9015         if (file_fd >= 0) {
9016             ssize_t written_len = 0;
9017             meta.tuning_data_version = TUNING_DATA_VERSION;
9018             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
9019             written_len += write(file_fd, data, sizeof(uint32_t));
9020             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
9021             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
9022             written_len += write(file_fd, data, sizeof(uint32_t));
9023             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
9024             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
9025             written_len += write(file_fd, data, sizeof(uint32_t));
9026             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
9027             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
9028             written_len += write(file_fd, data, sizeof(uint32_t));
9029             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
9030             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
9031             written_len += write(file_fd, data, sizeof(uint32_t));
9032             meta.tuning_mod3_data_size = 0;
9033             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
9034             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
9035             written_len += write(file_fd, data, sizeof(uint32_t));
9036             size_t total_size = meta.tuning_sensor_data_size;
9037             data = (void *)((uint8_t *)&meta.data);
9038             written_len += write(file_fd, data, total_size);
9039             total_size = meta.tuning_vfe_data_size;
9040             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
9041             written_len += write(file_fd, data, total_size);
9042             total_size = meta.tuning_cpp_data_size;
9043             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
9044             written_len += write(file_fd, data, total_size);
9045             total_size = meta.tuning_cac_data_size;
9046             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
9047             written_len += write(file_fd, data, total_size);
9048             close(file_fd);
9049         }else {
9050             LOGE("fail to open file for metadata dumping");
9051         }
9052     }
9053 }
9054 
9055 /*===========================================================================
9056  * FUNCTION   : cleanAndSortStreamInfo
9057  *
9058  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
9059  *              and sort them such that raw stream is at the end of the list
9060  *              This is a workaround for camera daemon constraint.
9061  *
9062  * PARAMETERS : None
9063  *
9064  *==========================================================================*/
cleanAndSortStreamInfo()9065 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
9066 {
9067     List<stream_info_t *> newStreamInfo;
9068 
9069     /*clean up invalid streams*/
9070     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
9071             it != mStreamInfo.end();) {
9072         if(((*it)->status) == INVALID){
9073             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
9074             delete channel;
9075             free(*it);
9076             it = mStreamInfo.erase(it);
9077         } else {
9078             it++;
9079         }
9080     }
9081 
9082     // Move preview/video/callback/snapshot streams into newList
9083     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9084             it != mStreamInfo.end();) {
9085         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
9086                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
9087                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
9088             newStreamInfo.push_back(*it);
9089             it = mStreamInfo.erase(it);
9090         } else
9091             it++;
9092     }
9093     // Move raw streams into newList
9094     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9095             it != mStreamInfo.end();) {
9096         newStreamInfo.push_back(*it);
9097         it = mStreamInfo.erase(it);
9098     }
9099 
9100     mStreamInfo = newStreamInfo;
9101 
9102     // Make sure that stream IDs are unique.
9103     uint32_t id = 0;
9104     for (auto streamInfo : mStreamInfo) {
9105         streamInfo->id = id++;
9106     }
9107 
9108 }
9109 
9110 /*===========================================================================
9111  * FUNCTION   : extractJpegMetadata
9112  *
9113  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
9114  *              JPEG metadata is cached in HAL, and return as part of capture
9115  *              result when metadata is returned from camera daemon.
9116  *
9117  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
9118  *              @request:      capture request
9119  *
9120  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)9121 void QCamera3HardwareInterface::extractJpegMetadata(
9122         CameraMetadata& jpegMetadata,
9123         const camera3_capture_request_t *request)
9124 {
9125     CameraMetadata frame_settings;
9126     frame_settings = request->settings;
9127 
9128     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
9129         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
9130                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
9131                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
9132 
9133     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
9134         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
9135                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
9136                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
9137 
9138     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
9139         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
9140                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
9141                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
9142 
9143     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
9144         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
9145                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
9146                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
9147 
9148     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
9149         jpegMetadata.update(ANDROID_JPEG_QUALITY,
9150                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
9151                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
9152 
9153     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
9154         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
9155                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
9156                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
9157 
9158     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9159         int32_t thumbnail_size[2];
9160         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9161         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9162         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9163             int32_t orientation =
9164                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9165             if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
9166                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
9167                int32_t temp;
9168                temp = thumbnail_size[0];
9169                thumbnail_size[0] = thumbnail_size[1];
9170                thumbnail_size[1] = temp;
9171             }
9172          }
9173          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
9174                 thumbnail_size,
9175                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9176     }
9177 
9178 }
9179 
9180 /*===========================================================================
9181  * FUNCTION   : convertToRegions
9182  *
9183  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
9184  *
9185  * PARAMETERS :
9186  *   @rect   : cam_rect_t struct to convert
9187  *   @region : int32_t destination array
9188  *   @weight : if we are converting from cam_area_t, weight is valid
9189  *             else weight = -1
9190  *
9191  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)9192 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
9193         int32_t *region, int weight)
9194 {
9195     region[FACE_LEFT] = rect.left;
9196     region[FACE_TOP] = rect.top;
9197     region[FACE_RIGHT] = rect.left + rect.width;
9198     region[FACE_BOTTOM] = rect.top + rect.height;
9199     if (weight > -1) {
9200         region[FACE_WEIGHT] = weight;
9201     }
9202 }
9203 
9204 /*===========================================================================
9205  * FUNCTION   : convertFromRegions
9206  *
9207  * DESCRIPTION: helper method to convert from array to cam_rect_t
9208  *
9209  * PARAMETERS :
9210  *   @rect   : cam_rect_t struct to convert
9211  *   @region : int32_t destination array
9212  *   @weight : if we are converting from cam_area_t, weight is valid
9213  *             else weight = -1
9214  *
9215  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const CameraMetadata & frame_settings,uint32_t tag)9216 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
9217         const CameraMetadata &frame_settings, uint32_t tag)
9218 {
9219     int32_t x_min = frame_settings.find(tag).data.i32[0];
9220     int32_t y_min = frame_settings.find(tag).data.i32[1];
9221     int32_t x_max = frame_settings.find(tag).data.i32[2];
9222     int32_t y_max = frame_settings.find(tag).data.i32[3];
9223     roi.weight = frame_settings.find(tag).data.i32[4];
9224     roi.rect.left = x_min;
9225     roi.rect.top = y_min;
9226     roi.rect.width = x_max - x_min;
9227     roi.rect.height = y_max - y_min;
9228 }
9229 
9230 /*===========================================================================
9231  * FUNCTION   : resetIfNeededROI
9232  *
9233  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
9234  *              crop region
9235  *
9236  * PARAMETERS :
9237  *   @roi       : cam_area_t struct to resize
9238  *   @scalerCropRegion : cam_crop_region_t region to compare against
9239  *
9240  *
9241  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)9242 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
9243                                                  const cam_crop_region_t* scalerCropRegion)
9244 {
9245     int32_t roi_x_max = roi->rect.width + roi->rect.left;
9246     int32_t roi_y_max = roi->rect.height + roi->rect.top;
9247     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
9248     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
9249 
9250     /* According to spec weight = 0 is used to indicate roi needs to be disabled
9251      * without having this check the calculations below to validate if the roi
9252      * is inside scalar crop region will fail resulting in the roi not being
9253      * reset causing algorithm to continue to use stale roi window
9254      */
9255     if (roi->weight == 0) {
9256         return true;
9257     }
9258 
9259     if ((roi_x_max < scalerCropRegion->left) ||
9260         // right edge of roi window is left of scalar crop's left edge
9261         (roi_y_max < scalerCropRegion->top)  ||
9262         // bottom edge of roi window is above scalar crop's top edge
9263         (roi->rect.left > crop_x_max) ||
9264         // left edge of roi window is beyond(right) of scalar crop's right edge
9265         (roi->rect.top > crop_y_max)){
9266         // top edge of roi windo is above scalar crop's top edge
9267         return false;
9268     }
9269     if (roi->rect.left < scalerCropRegion->left) {
9270         roi->rect.left = scalerCropRegion->left;
9271     }
9272     if (roi->rect.top < scalerCropRegion->top) {
9273         roi->rect.top = scalerCropRegion->top;
9274     }
9275     if (roi_x_max > crop_x_max) {
9276         roi_x_max = crop_x_max;
9277     }
9278     if (roi_y_max > crop_y_max) {
9279         roi_y_max = crop_y_max;
9280     }
9281     roi->rect.width = roi_x_max - roi->rect.left;
9282     roi->rect.height = roi_y_max - roi->rect.top;
9283     return true;
9284 }
9285 
9286 /*===========================================================================
9287  * FUNCTION   : convertLandmarks
9288  *
9289  * DESCRIPTION: helper method to extract the landmarks from face detection info
9290  *
9291  * PARAMETERS :
9292  *   @landmark_data : input landmark data to be converted
9293  *   @landmarks : int32_t destination array
9294  *
9295  *
9296  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)9297 void QCamera3HardwareInterface::convertLandmarks(
9298         cam_face_landmarks_info_t landmark_data,
9299         int32_t *landmarks)
9300 {
9301     if (landmark_data.is_left_eye_valid) {
9302         landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
9303         landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
9304     } else {
9305         landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9306         landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9307     }
9308 
9309     if (landmark_data.is_right_eye_valid) {
9310         landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
9311         landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
9312     } else {
9313         landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9314         landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9315     }
9316 
9317     if (landmark_data.is_mouth_valid) {
9318         landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
9319         landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
9320     } else {
9321         landmarks[MOUTH_X] = FACE_INVALID_POINT;
9322         landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9323     }
9324 }
9325 
9326 /*===========================================================================
9327  * FUNCTION   : setInvalidLandmarks
9328  *
9329  * DESCRIPTION: helper method to set invalid landmarks
9330  *
9331  * PARAMETERS :
9332  *   @landmarks : int32_t destination array
9333  *
9334  *
9335  *==========================================================================*/
setInvalidLandmarks(int32_t * landmarks)9336 void QCamera3HardwareInterface::setInvalidLandmarks(
9337         int32_t *landmarks)
9338 {
9339     landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9340     landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9341     landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9342     landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9343     landmarks[MOUTH_X] = FACE_INVALID_POINT;
9344     landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9345 }
9346 
// Shorthand for the raw data pointer of buffer INDEX inside a heap-memory
// object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

/*===========================================================================
 * FUNCTION   : getCapabilities
 *
 * DESCRIPTION: query camera capability from back-end
 *
 * PARAMETERS :
 *   @ops  : mm-interface ops structure
 *   @cam_handle  : camera handle for which we need capability
 *
 * RETURN     : ptr type of capability structure
 *              capability for success
 *              NULL for failure
 *
 * NOTE       : The returned struct is malloc'd here; ownership passes to
 *              the caller, who must free() it. The shared heap buffer used
 *              to talk to the back-end is always unmapped and released
 *              before returning, on both success and failure paths.
 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    // Shared-memory heap with a single buffer; the back-end fills it with
    // the capability struct after query_capability() below.
    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // Zero the buffer first so any field the back-end does not write has a
    // defined value.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    // Share the buffer with the camera daemon by fd so it can write the
    // capability data directly into it.
    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    // Back-end fills the mapped buffer in place.
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy out of the shared buffer into a heap struct the caller owns, so
    // the shared buffer can be unmapped/freed below unconditionally.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Clear analysis padding offsets for every analysis stream type.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

// Cleanup ladder: each label undoes exactly the steps that succeeded
// before its corresponding failure point. Order matters: unmap before
// deallocate, deallocate before delete.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    // cap_ptr is only non-NULL when rc == NO_ERROR, so no leak here.
    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
9437 
9438 /*===========================================================================
9439  * FUNCTION   : initCapabilities
9440  *
9441  * DESCRIPTION: initialize camera capabilities in static data struct
9442  *
9443  * PARAMETERS :
9444  *   @cameraId  : camera Id
9445  *
9446  * RETURN     : int32_t type of status
9447  *              NO_ERROR  -- success
9448  *              none-zero failure code
9449  *==========================================================================*/
initCapabilities(uint32_t cameraId)9450 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9451 {
9452     int rc = 0;
9453     mm_camera_vtbl_t *cameraHandle = NULL;
9454     uint32_t handle = 0;
9455 
9456     rc = camera_open((uint8_t)cameraId, &cameraHandle);
9457     if (rc) {
9458         LOGE("camera_open failed. rc = %d", rc);
9459         goto open_failed;
9460     }
9461     if (!cameraHandle) {
9462         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9463         goto open_failed;
9464     }
9465 
9466     handle = get_main_camera_handle(cameraHandle->camera_handle);
9467     gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9468     if (gCamCapability[cameraId] == NULL) {
9469         rc = FAILED_TRANSACTION;
9470         goto failed_op;
9471     }
9472 
9473     gCamCapability[cameraId]->camera_index = cameraId;
9474     if (is_dual_camera_by_idx(cameraId)) {
9475         handle = get_aux_camera_handle(cameraHandle->camera_handle);
9476         gCamCapability[cameraId]->aux_cam_cap =
9477                 getCapabilities(cameraHandle->ops, handle);
9478         if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9479             rc = FAILED_TRANSACTION;
9480             free(gCamCapability[cameraId]);
9481             goto failed_op;
9482         }
9483 
9484         // Copy the main camera capability to main_cam_cap struct
9485         gCamCapability[cameraId]->main_cam_cap =
9486                         (cam_capability_t *)malloc(sizeof(cam_capability_t));
9487         if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9488             LOGE("out of memory");
9489             rc = NO_MEMORY;
9490             goto failed_op;
9491         }
9492         memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9493                 sizeof(cam_capability_t));
9494     }
9495 failed_op:
9496     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9497     cameraHandle = NULL;
9498 open_failed:
9499     return rc;
9500 }
9501 
9502 /*==========================================================================
9503  * FUNCTION   : get3Aversion
9504  *
9505  * DESCRIPTION: get the Q3A S/W version
9506  *
9507  * PARAMETERS :
9508  *  @sw_version: Reference of Q3A structure which will hold version info upon
9509  *               return
9510  *
9511  * RETURN     : None
9512  *
9513  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)9514 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9515 {
9516     if(gCamCapability[mCameraId])
9517         sw_version = gCamCapability[mCameraId]->q3a_version;
9518     else
9519         LOGE("Capability structure NULL!");
9520 }
9521 
9522 
9523 /*===========================================================================
9524  * FUNCTION   : initParameters
9525  *
9526  * DESCRIPTION: initialize camera parameters
9527  *
9528  * PARAMETERS :
9529  *
9530  * RETURN     : int32_t type of status
9531  *              NO_ERROR  -- success
9532  *              none-zero failure code
9533  *==========================================================================*/
initParameters()9534 int QCamera3HardwareInterface::initParameters()
9535 {
9536     int rc = 0;
9537 
9538     //Allocate Set Param Buffer
9539     mParamHeap = new QCamera3HeapMemory(1);
9540     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9541     if(rc != OK) {
9542         rc = NO_MEMORY;
9543         LOGE("Failed to allocate SETPARM Heap memory");
9544         delete mParamHeap;
9545         mParamHeap = NULL;
9546         return rc;
9547     }
9548 
9549     //Map memory for parameters buffer
9550     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9551             CAM_MAPPING_BUF_TYPE_PARM_BUF,
9552             mParamHeap->getFd(0),
9553             sizeof(metadata_buffer_t),
9554             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9555     if(rc < 0) {
9556         LOGE("failed to map SETPARM buffer");
9557         rc = FAILED_TRANSACTION;
9558         mParamHeap->deallocate();
9559         delete mParamHeap;
9560         mParamHeap = NULL;
9561         return rc;
9562     }
9563 
9564     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9565 
9566     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9567     return rc;
9568 }
9569 
9570 /*===========================================================================
9571  * FUNCTION   : deinitParameters
9572  *
9573  * DESCRIPTION: de-initialize camera parameters
9574  *
9575  * PARAMETERS :
9576  *
9577  * RETURN     : NONE
9578  *==========================================================================*/
deinitParameters()9579 void QCamera3HardwareInterface::deinitParameters()
9580 {
9581     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9582             CAM_MAPPING_BUF_TYPE_PARM_BUF);
9583 
9584     mParamHeap->deallocate();
9585     delete mParamHeap;
9586     mParamHeap = NULL;
9587 
9588     mParameters = NULL;
9589 
9590     free(mPrevParameters);
9591     mPrevParameters = NULL;
9592 }
9593 
9594 /*===========================================================================
9595  * FUNCTION   : calcMaxJpegSize
9596  *
9597  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9598  *
9599  * PARAMETERS :
9600  *
9601  * RETURN     : max_jpeg_size
9602  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)9603 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9604 {
9605     size_t max_jpeg_size = 0;
9606     size_t temp_width, temp_height;
9607     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9608             MAX_SIZES_CNT);
9609     for (size_t i = 0; i < count; i++) {
9610         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9611         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9612         if (temp_width * temp_height > max_jpeg_size ) {
9613             max_jpeg_size = temp_width * temp_height;
9614         }
9615     }
9616     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9617     return max_jpeg_size;
9618 }
9619 
9620 /*===========================================================================
9621  * FUNCTION   : getMaxRawSize
9622  *
9623  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9624  *
9625  * PARAMETERS :
9626  *
9627  * RETURN     : Largest supported Raw Dimension
9628  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)9629 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9630 {
9631     int max_width = 0;
9632     cam_dimension_t maxRawSize;
9633 
9634     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9635     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9636         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9637             max_width = gCamCapability[camera_id]->raw_dim[i].width;
9638             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9639         }
9640     }
9641     return maxRawSize;
9642 }
9643 
9644 
9645 /*===========================================================================
9646  * FUNCTION   : calcMaxJpegDim
9647  *
9648  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9649  *
9650  * PARAMETERS :
9651  *
9652  * RETURN     : max_jpeg_dim
9653  *==========================================================================*/
calcMaxJpegDim()9654 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9655 {
9656     cam_dimension_t max_jpeg_dim;
9657     cam_dimension_t curr_jpeg_dim;
9658     max_jpeg_dim.width = 0;
9659     max_jpeg_dim.height = 0;
9660     curr_jpeg_dim.width = 0;
9661     curr_jpeg_dim.height = 0;
9662     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9663         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9664         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9665         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9666             max_jpeg_dim.width * max_jpeg_dim.height ) {
9667             max_jpeg_dim.width = curr_jpeg_dim.width;
9668             max_jpeg_dim.height = curr_jpeg_dim.height;
9669         }
9670     }
9671     return max_jpeg_dim;
9672 }
9673 
9674 /*===========================================================================
9675  * FUNCTION   : addStreamConfig
9676  *
9677  * DESCRIPTION: adds the stream configuration to the array
9678  *
9679  * PARAMETERS :
9680  * @available_stream_configs : pointer to stream configuration array
9681  * @scalar_format            : scalar format
9682  * @dim                      : configuration dimension
9683  * @config_type              : input or output configuration type
9684  *
9685  * RETURN     : NONE
9686  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)9687 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9688         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9689 {
9690     available_stream_configs.add(scalar_format);
9691     available_stream_configs.add(dim.width);
9692     available_stream_configs.add(dim.height);
9693     available_stream_configs.add(config_type);
9694 }
9695 
9696 /*===========================================================================
9697  * FUNCTION   : suppportBurstCapture
9698  *
9699  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9700  *
9701  * PARAMETERS :
9702  *   @cameraId  : camera Id
9703  *
9704  * RETURN     : true if camera supports BURST_CAPTURE
9705  *              false otherwise
9706  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)9707 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9708 {
9709     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9710     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9711     const int32_t highResWidth = 3264;
9712     const int32_t highResHeight = 2448;
9713 
9714     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9715         // Maximum resolution images cannot be captured at >= 10fps
9716         // -> not supporting BURST_CAPTURE
9717         return false;
9718     }
9719 
9720     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9721         // Maximum resolution images can be captured at >= 20fps
9722         // --> supporting BURST_CAPTURE
9723         return true;
9724     }
9725 
9726     // Find the smallest highRes resolution, or largest resolution if there is none
9727     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9728             MAX_SIZES_CNT);
9729     size_t highRes = 0;
9730     while ((highRes + 1 < totalCnt) &&
9731             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9732             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9733             highResWidth * highResHeight)) {
9734         highRes++;
9735     }
9736     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9737         return true;
9738     } else {
9739         return false;
9740     }
9741 }
9742 
9743 /*===========================================================================
9744  * FUNCTION   : getPDStatIndex
9745  *
9746  * DESCRIPTION: Return the meta raw phase detection statistics index if present
9747  *
9748  * PARAMETERS :
9749  *   @caps    : camera capabilities
9750  *
9751  * RETURN     : int32_t type
9752  *              non-negative - on success
9753  *              -1 - on failure
9754  *==========================================================================*/
getPDStatIndex(cam_capability_t * caps)9755 int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9756     if (nullptr == caps) {
9757         return -1;
9758     }
9759 
9760     uint32_t metaRawCount = caps->meta_raw_channel_count;
9761     int32_t ret = -1;
9762     for (size_t i = 0; i < metaRawCount; i++) {
9763         if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9764             ret = i;
9765             break;
9766         }
9767     }
9768 
9769     return ret;
9770 }
9771 
9772 /*===========================================================================
9773  * FUNCTION   : initStaticMetadata
9774  *
9775  * DESCRIPTION: initialize the static metadata
9776  *
9777  * PARAMETERS :
9778  *   @cameraId  : camera Id
9779  *
9780  * RETURN     : int32_t type of status
9781  *              0  -- success
9782  *              non-zero failure code
9783  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)9784 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9785 {
9786     int rc = 0;
9787     CameraMetadata staticInfo;
9788     size_t count = 0;
9789     bool limitedDevice = false;
9790     char prop[PROPERTY_VALUE_MAX];
9791     bool supportBurst = false;
9792     Vector<int32_t> available_characteristics_keys;
9793 
9794     supportBurst = supportBurstCapture(cameraId);
9795 
9796     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9797      * guaranteed or if min fps of max resolution is less than 20 fps, its
9798      * advertised as limited device*/
9799     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9800             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9801             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9802             !supportBurst;
9803 
9804     uint8_t supportedHwLvl = limitedDevice ?
9805             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9806 #ifndef USE_HAL_3_3
9807             // LEVEL_3 - This device will support level 3.
9808             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9809 #else
9810             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9811 #endif
9812 
9813     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9814             &supportedHwLvl, 1);
9815 
9816     bool facingBack = false;
9817     if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9818             (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9819         facingBack = true;
9820     }
9821     /*HAL 3 only*/
9822     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9823                     &gCamCapability[cameraId]->min_focus_distance, 1);
9824 
9825     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9826                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
9827 
9828     /*should be using focal lengths but sensor doesn't provide that info now*/
9829     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9830                       &gCamCapability[cameraId]->focal_length,
9831                       1);
9832 
9833     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9834             gCamCapability[cameraId]->apertures,
9835             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9836 
9837     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9838             gCamCapability[cameraId]->filter_densities,
9839             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9840 
9841 
9842     uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9843     size_t mode_count =
9844         MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9845     for (size_t i = 0; i < mode_count; i++) {
9846       available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9847     }
9848     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9849             available_opt_stab_modes, mode_count);
9850 
9851     int32_t lens_shading_map_size[] = {
9852             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9853             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9854     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9855                       lens_shading_map_size,
9856                       sizeof(lens_shading_map_size)/sizeof(int32_t));
9857 
9858     // Lens calibration for MOTION_TRACKING, back camera only
9859     if (cameraId == 0) {
9860 
9861         float poseRotation[4] = {1.0f, 0.f, 0.f, 0.f}; // quaternion rotation
9862         float poseTranslation[3] = {0.0f, 0.f, 0.f}; // xyz translation, meters
9863         uint8_t poseReference = ANDROID_LENS_POSE_REFERENCE_GYROSCOPE;
9864         // TODO: b/70565622 - these should have better identity values as a fallback
9865         float cameraIntrinsics[5] = {100.f, 100.f, 0.f, 1000, 1000}; // fx,fy,sx,cx,cy
9866         float radialDistortion[5] = {0.f, 0.f, 0.f, 0.f, 0.f}; // identity
9867 
9868         bool success = readSensorCalibration(
9869                 gCamCapability[cameraId]->active_array_size.width,
9870                 poseRotation, poseTranslation, cameraIntrinsics, radialDistortion);
9871         if (!success) {
9872             ALOGE("Using identity lens calibration values");
9873         }
9874         staticInfo.update(ANDROID_LENS_POSE_ROTATION,
9875                 poseRotation, sizeof(poseRotation)/sizeof(float));
9876         staticInfo.update(ANDROID_LENS_POSE_TRANSLATION,
9877                 poseTranslation, sizeof(poseTranslation)/sizeof(float));
9878         staticInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
9879                 cameraIntrinsics, sizeof(cameraIntrinsics)/sizeof(float));
9880         staticInfo.update(ANDROID_LENS_DISTORTION,
9881                 radialDistortion, sizeof(radialDistortion)/sizeof(float));
9882         staticInfo.update(ANDROID_LENS_POSE_REFERENCE,
9883                 &poseReference, sizeof(poseReference));
9884     }
9885 
9886     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9887             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9888 
9889     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9890             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9891 
9892     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9893             &gCamCapability[cameraId]->max_frame_duration, 1);
9894 
9895     camera_metadata_rational baseGainFactor = {
9896             gCamCapability[cameraId]->base_gain_factor.numerator,
9897             gCamCapability[cameraId]->base_gain_factor.denominator};
9898     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9899                       &baseGainFactor, 1);
9900 
9901     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9902                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9903 
9904     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9905             gCamCapability[cameraId]->pixel_array_size.height};
9906     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9907                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9908 
9909     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9910             gCamCapability[cameraId]->active_array_size.top,
9911             gCamCapability[cameraId]->active_array_size.width,
9912             gCamCapability[cameraId]->active_array_size.height};
9913     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9914             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9915 
9916     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9917             &gCamCapability[cameraId]->white_level, 1);
9918 
9919     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9920     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9921             gCamCapability[cameraId]->color_arrangement);
9922     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9923             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9924 
9925 #ifndef USE_HAL_3_3
9926     bool hasBlackRegions = false;
9927     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9928         LOGW("black_region_count: %d is bounded to %d",
9929             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9930         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9931     }
9932     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9933         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9934         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9935             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9936         }
9937         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9938                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9939         hasBlackRegions = true;
9940     }
9941 #endif
9942     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9943             &gCamCapability[cameraId]->flash_charge_duration, 1);
9944 
9945     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9946             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9947 
9948     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9949             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9950             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9951     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9952             &timestampSource, 1);
9953 
9954     //update histogram vendor data
9955     staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9956             &gCamCapability[cameraId]->histogram_size, 1);
9957 
9958     staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9959             &gCamCapability[cameraId]->max_histogram_count, 1);
9960 
9961     //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9962     //so that app can request fewer number of bins than the maximum supported.
9963     std::vector<int32_t> histBins;
9964     int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9965     histBins.push_back(maxHistBins);
9966     while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9967            (maxHistBins & 0x1) == 0) {
9968         histBins.push_back(maxHistBins >> 1);
9969         maxHistBins >>= 1;
9970     }
9971     staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9972             histBins.data(), histBins.size());
9973     if (!histBins.empty()) {
9974         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9975     }
9976 
    // Sharpness map dimensions and maximum value from sensor capabilities.
    int32_t sharpness_map_size[] = {
            gCamCapability[cameraId]->sharpness_map_size.width,
            gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);

    // If a PD-stats raw meta stream exists (index >= 0), advertise PDAF data
    // via the public depth keys and the Nexus experimental vendor tags.
    int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
    if (0 <= indexPD) {
        // Advertise PD stats data as part of the Depth capabilities
        int32_t depthWidth =
                gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
        int32_t depthHeight =
                gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
        // Row stride in bytes; assumes 2 bytes per PD sample — TODO confirm.
        int32_t depthStride =
                gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
        // NOTE(review): presumably total PD bytes (w*h*2) divided by a
        // 16-byte per-point footprint for the BLOB point cloud — verify
        // against the depth point cloud sample layout.
        int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
        assert(0 < depthSamplesCount);
        staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
                &depthSamplesCount, 1);

        // Two depth output configs: RAW16 at the PD dimensions and a BLOB
        // point cloud of depthSamplesCount x 1.
        int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
                depthHeight,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));

        // 33333333 ns == ~30 fps minimum frame duration for both formats.
        int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
                depthHeight, 33333333,
                HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                depthMinDuration,
                sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));

        // Zero stall for depth streams.
        int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
                depthHeight, 0,
                HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
                depthStallDuration,
                sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));

        // Depth streams may be combined with color streams.
        uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
        staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);

        //RAW16 depth format doesn't require inefficient memory copy, recommend
        //only this depth format. The format itself is not public so it won't be
        //possible to advertise in the RAW use case. Use snapshot for now.
        int32_t recommendedDepthConfigs[] = {depthWidth, depthHeight, HAL_PIXEL_FORMAT_RAW16,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT};
        staticInfo.update(ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
                recommendedDepthConfigs,
                sizeof(recommendedDepthConfigs) / sizeof(recommendedDepthConfigs[0]));
        available_characteristics_keys.add(
                ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS);

        // Vendor tag: raw PD buffer geometry (width, height, stride).
        int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
        staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
                pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);

        // EEPROM PDAF calibration blobs, exported as raw byte arrays.
        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
                sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);

        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
                sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);

        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
                sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
    }
10058 
10059 
    // EEPROM white-balance calibration vendor tags.
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
        &(gCamCapability[cameraId]->wb_cal.num_lights), 1);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS);

    // Per-illuminant R/G and B/G ratios; num_lights entries each.
    const int32_t num_lights = gCamCapability[cameraId]->wb_cal.num_lights;
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
        gCamCapability[cameraId]->wb_cal.r_over_g, num_lights);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS);

    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
        gCamCapability[cameraId]->wb_cal.b_over_g, num_lights);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS);

    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
        &(gCamCapability[cameraId]->wb_cal.gr_over_gb), 1);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO);

    // Pixel formats advertised to the framework (public scaler formats plus
    // HAL-private RAW10 / IMPLEMENTATION_DEFINED / Y8).
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_PIXEL_FORMAT_Y8};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
            scalar_formats_count);
10088 
    // Flatten capability size tables into (width, height) int32 pairs;
    // each table is clamped to MAX_SIZES_CNT entries.
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    // AE target FPS ranges, flattened as (min, max) pairs.
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    // Exposure compensation step as a rational (numerator/denominator).
    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);
10115 
    // Video stabilization: OFF is always advertised; ON is added only when
    // all three hold: back-facing camera, persist.camera.eis.enable set
    // (defaults to "1"), and the sensor supports EIS 2.0 or 3.0.
    Vector<uint8_t> availableVstabModes;
    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    char eis_prop[PROPERTY_VALUE_MAX];
    bool eisSupported = false;
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "1");
    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
    count = IS_TYPE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
            (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
            eisSupported = true;
            break;
        }
    }
    if (facingBack && eis_prop_set && eisSupported) {
        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes.array(), availableVstabModes.size());
10137 
    /*HAL 1 and HAL 3 common*/
    // Max digital zoom = last entry of zoom_ratio_tbl (scaled by 100) divided
    // by the HAL1 minimum step, capped at MAX_PREFERRED_ZOOM_RATIO.
    uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
    uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
    uint32_t minZoomStep = 100; //as per HAL1/API1 spec
    // Cap the max zoom to the max preferred value
    // NOTE(review): maxZoomStep/minZoomStep is unsigned integer division, so
    // any fractional zoom (e.g. 799/100 -> 7, not 7.99) is truncated before
    // the float MIN — confirm whether truncation is intended here.
    float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    float zoomRatioRange[] = {1.0f, maxZoom};
    staticInfo.update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRatioRange, 2);
    // Cache the computed cap back into the capability struct for later use.
    gCamCapability[cameraId]->max_zoom = maxZoom;

    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    // Max metering regions: 1 AE, 0 AWB, 1 AF (AF dropped to 0 when only a
    // single focus mode, i.e. fixed focus, is supported).
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);
10159 
    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    // Face detection modes are gated by persist.camera.facedetect
    // (default "1" => OFF+SIMPLE). `prop` is declared earlier in this
    // function (outside this view).
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    LOGD("Support face detection mode: %d",
             supportedFaceDetectMode);

    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    /* support mode should be OFF if max number of face is 0 */
    if (maxFaces <= 0) {
        supportedFaceDetectMode = 0;
    }
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        // Mode 0 (or any unexpected value): OFF only, report zero faces.
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);
    // Vendor tag: blink/smile/gaze classification availability.
    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
    staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
            &face_bsgc, 1);

    int32_t exposureCompensationRange[] = {
            gCamCapability[cameraId]->exposure_compensation_min,
            gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // available_thumbnail_sizes is declared earlier in this function
    // (outside this view).
    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
10207 
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // available_stream_configs: flat (format, w, h, direction) list for the
    // public tag. stream_configs mirrors it as config_entry tuples so later
    // loops can test membership; suggested_configs maps each entry to a
    // bitmask of recommended-use-case flags.
    Vector<int32_t> available_stream_configs;
    std::vector<config_entry> stream_configs;
    std::unordered_map<config_entry, int32_t, ConfigEntryHash> suggested_configs;
    int32_t suggested_proc_formats[] = {
        ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t suggested_formats_count = sizeof(suggested_proc_formats) /
        sizeof(suggested_proc_formats[0]);
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;

    /*advertise list of input dimensions supported based on below property.
    By default all sizes upto 5MP will be advertised.
    Note that the setprop resolution format should be WxH.
    e.g: adb shell setprop persist.camera.input.minsize 1280x720
    To list all supported sizes, setprop needs to be set with "0x0" */
    cam_dimension_t minInputSize = {2592,1944}; //5MP
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.input.minsize", prop, "2592x1944");
    // Parse "WxH"; a missing token leaves the corresponding 5MP default.
    if (strlen(prop) > 0) {
        char *saveptr = NULL;
        char *token = strtok_r(prop, "x", &saveptr);
        if (token != NULL) {
            minInputSize.width = atoi(token);
        }
        token = strtok_r(NULL, "x", &saveptr);
        if (token != NULL) {
            minInputSize.height = atoi(token);
        }
    }
10242 
    // Recommended-stream-configuration use-case bitmasks.
    int32_t raw_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW;
    int32_t zsl_snapshot_usecase =
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT) |
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL);
    int32_t zsl_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats: one output config per supported raw dimension.
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->raw_dim[i].width,
                        gCamCapability[cameraId]->raw_dim[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                // NOTE(review): only RAW10 and RAW_OPAQUE get the RAW
                // recommended-use-case flag; RAW16 is excluded — confirm
                // this is intentional.
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_RAW10) ||
                        (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE)) {
                    suggested_configs[entry] |= raw_usecase;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG: one output config per picture size, all suggested for
            // snapshot + ZSL.
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(config_entry(
                            gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                            gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                            scalar_formats[j],
                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT));
                // NOTE(review): this config_entry duplicates the one pushed
                // just above; harmless, but could be a single variable.
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                suggested_configs[entry] |= zsl_snapshot_usecase;
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_Y8:
        default:
            // Processed formats: output configs for every picture size.
            // NOTE(review): largest_picture_size is zeroed but never used in
            // this visible scope — possibly dead code.
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                        scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                suggested_configs[entry] |= zsl_snapshot_usecase;
                /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
                // Input (reprocess) config is added only for the first table
                // entry (i == 0, the largest size) when it meets minInputSize.
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                        scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                        scalar_formats[j] == HAL_PIXEL_FORMAT_Y8) && i == 0) {
                     if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
                            >= minInputSize.width) || (gCamCapability[cameraId]->
                            picture_sizes_tbl[i].height >= minInputSize.height)) {
                         addStreamConfig(available_stream_configs, scalar_formats[j],
                                 gCamCapability[cameraId]->picture_sizes_tbl[i],
                                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                         config_entry entry(
                                 gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                                 gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                                 scalar_formats[j],
                                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                         suggested_configs[entry] |= zsl_usecase;
                     }
                }
            }

            break;
        }
    }
10328 
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());

    // Tag preview-capable (size, format) pairs — only those already present
    // in stream_configs — with the PREVIEW recommended use case.
    int32_t preview_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW;
    for (size_t i = 0; i < gCamCapability[cameraId]->preview_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->preview_sizes_tbl[i].width,
                    gCamCapability[cameraId]->preview_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= preview_usecase;
            }
        }
    }

    // Same for video record sizes.
    int32_t record_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD;
    for (size_t i = 0; i < gCamCapability[cameraId]->video_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->video_sizes_tbl[i].width,
                    gCamCapability[cameraId]->video_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= record_usecase;
            }
        }
    }

    // Live-snapshot sizes (BLOB only) get the VIDEO_SNAPSHOT use case.
    int32_t video_snapshot_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT;
    for (size_t i = 0; i < gCamCapability[cameraId]->livesnapshot_sizes_tbl_cnt; i++) {
        config_entry entry(gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].width,
                gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].height,
                HAL_PIXEL_FORMAT_BLOB,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
        if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                stream_configs.end()) {
            suggested_configs[entry] |= video_snapshot_usecase;
        }
    }

    // Flatten map into (w, h, format, direction, usecase-mask) quintuples.
    // Order follows unordered_map iteration and is therefore unspecified.
    std::vector<int32_t> suggested_array;
    suggested_array.reserve(suggested_configs.size() * 5);
    for (const auto &it : suggested_configs) {
        suggested_array.push_back(std::get<0>(it.first));
        suggested_array.push_back(std::get<1>(it.first));
        suggested_array.push_back(std::get<2>(it.first));
        suggested_array.push_back(std::get<3>(it.first));
        suggested_array.push_back(it.second);
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
            suggested_array.data(), suggested_array.size());
10387 
    /* android.scaler.availableMinFrameDurations */
    // Entries are (format, width, height, duration_ns); RAW formats use the
    // raw dimension/duration tables, all others the picture tables.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
10416 
    // High-speed (HFR) video configurations. Each supported HFR mode is
    // translated to an fps value; modes below MIN_FPS_FOR_BATCH_MODE (and
    // OFF/MAX) are skipped.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                // Variable range [PREVIEW_FPS_FOR_HFR, fps]; batch size is
                // fps / preview-fps (e.g. 120/30 = 4 frames per batch).
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                // Fixed range [fps, fps] for actual recording.
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // array() is non-null only when at least one config was added.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }

    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);
10496 
    // Map HAL effect enums to framework values; unknown entries are dropped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Scene modes: translate HAL enums (excluding OFF) to framework values,
    // remembering the HAL-table index of each accepted mode for the
    // overrides lookup below.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);

            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Per-scene-mode (AE, AWB, AF) override triplets.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // NOTE(review): AVAILABLE_SCENE_MODES above was updated with the
    // pre-fallback count, and makeOverridesList was called before this
    // fallback — when the original count was 0, the overrides update below
    // publishes 3 entries that makeOverridesList never filled. Verify
    // whether the fallback should happen before both calls.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
10558 
    // Translate the backend's supported antibanding modes into framework
    // enum values; backend entries with no framework equivalent are dropped.
    // 'size' and 'count' are function-scope scratch variables reused by the
    // similar loops below.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);
10575 
    // Color aberration correction modes. OFF is deliberately first in the
    // array so that publishing only one element advertises exactly OFF.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        // If no aberration correction modes are available for a device,
        // advertise only OFF.
        size = 1;
    } else {
        // A non-zero count means at least one of FAST / HIGH_QUALITY is
        // supported, so advertise all 3 modes as the Android M requirement
        // mandates.
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
10594 
    // Translate the backend's focus modes into framework AF modes; entries
    // with no framework mapping are skipped.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);
10610 
    // Translate the backend's white balance modes into framework AWB modes;
    // unmapped entries are skipped.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
10627 
    // Flash firing levels are copied through from the backend verbatim —
    // no enum translation table exists for them.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    // Whether this camera module has a flash unit. flashAvailable is also
    // consulted below when building the AE mode list.
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);
10646 
    // Build the AE mode list from the backend's supported modes.
    // CAM_AE_MODE_ON_EXTERNAL_FLASH is remapped to its framework enum
    // explicitly; the remaining backend values are published as-is
    // (NOTE(review): assumes they already match the framework enums —
    // verify against the cam_types definitions).
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
        if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
            aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
        }
        avail_ae_modes.add(aeMode);
    }
    // Flash-driven AE modes are only advertised when a flash unit exists.
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
10664 
    // Supported sensor sensitivity (ISO) range: {min, max}.
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    // Highest sensitivity achievable through analog gain alone.
    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // Sensor mount angle published as the orientation tag.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Maximum simultaneous output streams, published in the order the
    // constants are listed below.
    // NOTE(review): the tag is documented as (raw, processed non-stalling,
    // processed stalling) — confirm the constant ordering here matches.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10688 
    // No LEDs are exposed: the tag is deliberately published with count 0.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Focus distance calibration quality, published only when the backend
    // value maps to a framework enum. Note: 'val' is function-scoped here
    // and reused by the illuminant lookups further below.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }
10701 
    // Sensor test pattern modes that have a framework equivalent.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case number of frames a request can be in flight in the HAL:
    // in-flight budget plus startup and frame-skip delays.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    // Number of partial result callbacks delivered per capture.
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum extra frames of stall a reprocess capture may introduce.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10730 
    // Advertised device capabilities: a fixed baseline plus entries gated
    // on burst support, HFR configuration, sensor type and camera id.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Constrained high-speed video requires HFR to be enabled AND at least
    // one published HFR configuration.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW capability is only advertised for non-YUV (Bayer) sensors.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    // Only back camera supports MOTION_TRACKING
    if (cameraId == 0) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());
10757 
    // aeLockAvailable is set to true if capabilities include MANUAL_SENSOR
    // or BURST_CAPTURE. Assumption is that all bayer (CAM_SENSOR_RAW)
    // cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    // awbLockAvailable is set to true if capabilities include
    // MANUAL_POST_PROCESSING or BURST_CAPTURE. Assumption is that all bayer
    // cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // At most one reprocess input stream is advertised.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
10778 
    /* Format of the map is: input format, num_output_formats,
     * outputFormat1, .., outputFormatN. Three input formats are listed:
     * IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888, Y8},
     * YCbCr_420_888 -> {BLOB, YCbCr_420_888},
     * Y8 -> {BLOB, Y8}. */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 3,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_Y8,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_Y8, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_Y8};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // The recommended map mirrors the full map exactly.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Per-frame control: new settings take effect on the next frame.
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);
10797 
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost (ISP digital gain) range; the tag only
    // exists on HAL versions newer than 3.3.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
#endif
10808 
    // Hot pixel correction modes (OFF is not advertised).
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10814 
10815     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10816                                          ANDROID_SHADING_MODE_FAST,
10817                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
10818     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10819                       available_shading_modes,
10820                       3);
10821 
10822     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10823                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10824     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10825                       available_lens_shading_map_modes,
10826                       2);
10827 
    // Edge enhancement modes (fixed list, including the ZSL variant).
    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    // Noise reduction modes (fixed list, including MINIMAL and ZSL).
    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    // Tonemap modes (fixed list).
    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot pixel map output is not supported: only OFF is advertised.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10856 
    // Reference illuminants for the two sensor color calibration sets;
    // each is published only when the backend value maps to a framework
    // enum ('val' was declared earlier in this function).
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color calibration matrices are reinterpreted in place as rational
    // arrays for the metadata API.
    // NOTE(review): relies on the capability struct's element layout
    // matching camera_metadata_rational_t — confirm against cam_types.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10894 
#ifndef USE_HAL_3_3

    // Keys that may only be changed when a capture session is
    // (re)configured; the tag only exists on HAL versions newer than 3.3.
    int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE, QCAMERA3_INSTANT_AEC_MODE, QCAMERA3_USE_AV_TIMER,
        QCAMERA3_VIDEO_HDR_MODE, TANGO_MODE_DATA_SENSOR_FULLFOV};
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
            sizeof(session_keys) / sizeof(session_keys[0]));

#endif
10904 
    // Base set of per-request (settable) keys: standard Android control
    // keys, QTI vendor keys and experimental keys. Capability-dependent
    // keys are appended below before the tag is published.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_CONTROL_ZOOM_RATIO,
#endif
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP_MODE, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
       QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
       QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
       QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
       QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
       QCAMERA3_BINNING_CORRECTION_MODE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       TANGO_MODE_DATA_SENSOR_FULLFOV,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
       NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       };
10966 
    // Publish the request keys: the static base list plus keys that are
    // only meaningful for this device's capabilities.
    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only advertised when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    // ZSL-related request keys, gated by the global expose flag.
    if (gExposeEnableZslKey) {
        available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
10984 
    // Base set of per-capture result keys: standard Android result keys,
    // QTI vendor keys, DevCamDebug debug keys and experimental keys.
    // Capability-dependent result keys are appended below before the tag
    // is published.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AF_SCENE_CHANGE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_OIS_TIMESTAMPS, ANDROID_STATISTICS_OIS_X_SHIFTS,
       ANDROID_STATISTICS_OIS_Y_SHIFTS,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_CONTROL_ZOOM_RATIO,
#endif
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
       QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
       QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
       QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
       QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
       QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
       QCAMERA3_STATS_GAZE_DEGREE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys zzHDR
       DEVCAMDEBUG_AEC_L_REAL_GAIN,
       DEVCAMDEBUG_AEC_L_LINECOUNT,
       DEVCAMDEBUG_AEC_S_REAL_GAIN,
       DEVCAMDEBUG_AEC_S_LINECOUNT,
       DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
       DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
       // DevCamDebug metadata result_keys ADRC
       DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
       DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
       DEVCAMDEBUG_AEC_GTM_RATIO,
       DEVCAMDEBUG_AEC_LTM_RATIO,
       DEVCAMDEBUG_AEC_LA_RATIO,
       DEVCAMDEBUG_AEC_GAMMA_RATIO,
       // DevCamDebug metadata result_keys AEC MOTION
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
       DEVCAMDEBUG_AEC_SUBJECT_MOTION,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
       NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
       NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
       NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION
       };
11117 
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    // Publish the result keys: the static base list plus capability-
    // dependent additions (AF regions, RAW-only keys, face detect keys,
    // dynamic black/white level, ZSL keys).
    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    // AF regions only appear in results when multiple focus modes exist.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // Noise profile and green split are only produced for RAW sensors.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face detect additions depend on the supported mode: mode 1 adds
    // rectangles/scores, modes 2 and 3 add ids/landmarks instead.
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
#ifndef USE_HAL_3_3
    {
        // Dynamic black/white level results exist only past HAL 3.3.
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
#endif

    // ZSL-related result keys, gated by the global expose flag.
    if (gExposeEnableZslKey) {
        available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
11154 
11155     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
11156        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
11157        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
11158        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
11159        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
11160        ANDROID_SCALER_CROPPING_TYPE,
11161        ANDROID_SYNC_MAX_LATENCY,
11162        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
11163        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
11164        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
11165        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
11166        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
11167        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
11168        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
11169        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
11170        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
11171        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
11172        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
11173        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
11174        ANDROID_LENS_FACING,
11175        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
11176        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
11177        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
11178        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
11179        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
11180        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
11181        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
11182        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
11183        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
11184        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
11185        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
11186        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
11187        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
11188        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
11189        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
11190        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
11191        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
11192        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
11193        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
11194        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
11195        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
11196        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
11197        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
11198        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
11199        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
11200        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
11201        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
11202        ANDROID_TONEMAP_MAX_CURVE_POINTS,
11203        ANDROID_CONTROL_AVAILABLE_MODES,
11204        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
11205        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
11206        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
11207        ANDROID_SHADING_AVAILABLE_MODES,
11208        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
11209 #ifndef USE_HAL_3_3
11210        ANDROID_SENSOR_OPAQUE_RAW_SIZE,
11211        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
11212        ANDROID_CONTROL_ZOOM_RATIO_RANGE,
11213 #endif
11214        ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
11215        ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
11216        QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
11217        QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
11218        QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11219        QCAMERA3_SHARPNESS_RANGE,
11220        QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
11221        QCAMERA3_STATS_BSGC_AVAILABLE
11222        };
11223 
11224     available_characteristics_keys.appendArray(characteristics_keys_basic,
11225             sizeof(characteristics_keys_basic)/sizeof(int32_t));
11226 #ifndef USE_HAL_3_3
11227     if (hasBlackRegions) {
11228         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
11229     }
11230 #endif
11231 
11232     if (cameraId == 0) {
11233         int32_t lensCalibrationKeys[] = {
11234             ANDROID_LENS_POSE_ROTATION,
11235             ANDROID_LENS_POSE_TRANSLATION,
11236             ANDROID_LENS_POSE_REFERENCE,
11237             ANDROID_LENS_INTRINSIC_CALIBRATION,
11238             ANDROID_LENS_DISTORTION,
11239         };
11240         available_characteristics_keys.appendArray(lensCalibrationKeys,
11241                 sizeof(lensCalibrationKeys) / sizeof(lensCalibrationKeys[0]));
11242     }
11243 
11244     if (0 <= indexPD) {
11245         int32_t depthKeys[] = {
11246                 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
11247                 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
11248                 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
11249                 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
11250                 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
11251         };
11252         available_characteristics_keys.appendArray(depthKeys,
11253                 sizeof(depthKeys) / sizeof(depthKeys[0]));
11254     }
11255 
11256     /*available stall durations depend on the hw + sw and will be different for different devices */
11257     /*have to add for raw after implementation*/
11258     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
11259     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
11260 
11261     Vector<int64_t> available_stall_durations;
11262     for (uint32_t j = 0; j < stall_formats_count; j++) {
11263         if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
11264             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
11265                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
11266                 available_stall_durations.add(stall_formats[j]);
11267                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
11268                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
11269                 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
11270           }
11271         } else {
11272             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
11273                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11274                 available_stall_durations.add(stall_formats[j]);
11275                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
11276                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
11277                 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
11278             }
11279         }
11280     }
11281     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
11282                       available_stall_durations.array(),
11283                       available_stall_durations.size());
11284 
11285     //QCAMERA3_OPAQUE_RAW
11286     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
11287     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
11288     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
11289     case LEGACY_RAW:
11290         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
11291             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
11292         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
11293             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
11294         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
11295             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
11296         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
11297         break;
11298     case MIPI_RAW:
11299         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
11300             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
11301         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
11302             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
11303         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
11304             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
11305         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
11306         break;
11307     default:
11308         LOGE("unknown opaque_raw_format %d",
11309                 gCamCapability[cameraId]->opaque_raw_fmt);
11310         break;
11311     }
11312     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
11313 
11314     Vector<int32_t> strides;
11315     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11316             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11317         cam_stream_buf_plane_info_t buf_planes;
11318         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
11319         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
11320         cam_stream_info_t info = {.fmt = fmt};
11321         mm_stream_calc_offset_raw(&info, &gCamCapability[cameraId]->raw_dim[i],
11322             &gCamCapability[cameraId]->padding_info, &buf_planes);
11323         strides.add(buf_planes.plane_info.mp[0].stride);
11324     }
11325 
11326     if (!strides.isEmpty()) {
11327         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
11328                 strides.size());
11329         available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
11330     }
11331 
11332     //TBD: remove the following line once backend advertises zzHDR in feature mask
11333     gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
11334     //Video HDR default
11335     if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
11336             (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
11337             CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
11338         int32_t vhdr_mode[] = {
11339                 QCAMERA3_VIDEO_HDR_MODE_OFF,
11340                 QCAMERA3_VIDEO_HDR_MODE_ON};
11341 
11342         size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
11343         staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
11344                     vhdr_mode, vhdr_mode_count);
11345         available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
11346     }
11347 
11348     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11349             (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
11350             sizeof(gCamCapability[cameraId]->related_cam_calibration));
11351 
11352     uint8_t isMonoOnly =
11353             (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
11354     staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
11355             &isMonoOnly, 1);
11356 
11357 #ifndef USE_HAL_3_3
11358     Vector<int32_t> opaque_size;
11359     for (size_t j = 0; j < scalar_formats_count; j++) {
11360         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
11361             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11362                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11363                 cam_stream_buf_plane_info_t buf_planes;
11364                 cam_stream_info_t info = {.fmt = fmt};
11365                 rc = mm_stream_calc_offset_raw(&info, &gCamCapability[cameraId]->raw_dim[i],
11366                          &gCamCapability[cameraId]->padding_info, &buf_planes);
11367 
11368                 if (rc == 0) {
11369                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
11370                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
11371                     opaque_size.add(buf_planes.plane_info.frame_len);
11372                 }else {
11373                     LOGE("raw frame calculation failed!");
11374                 }
11375             }
11376         }
11377     }
11378 
11379     if ((opaque_size.size() > 0) &&
11380             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
11381         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
11382     else
11383         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
11384 #endif
11385 
11386     if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
11387         int32_t avail_ir_modes[CAM_IR_MODE_MAX];
11388         size = 0;
11389         count = CAM_IR_MODE_MAX;
11390         count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
11391         for (size_t i = 0; i < count; i++) {
11392             int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
11393                     gCamCapability[cameraId]->supported_ir_modes[i]);
11394             if (NAME_NOT_FOUND != val) {
11395                 avail_ir_modes[size] = (int32_t)val;
11396                 size++;
11397             }
11398         }
11399         staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
11400                 avail_ir_modes, size);
11401         available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
11402     }
11403 
11404     if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
11405         uint8_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
11406         size = 0;
11407         count = CAM_AEC_CONVERGENCE_MAX;
11408         count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
11409         for (size_t i = 0; i < count; i++) {
11410             int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
11411                     gCamCapability[cameraId]->supported_instant_aec_modes[i]);
11412             if (NAME_NOT_FOUND != val) {
11413                 available_instant_aec_modes[size] = (uint8_t)val;
11414                 size++;
11415             }
11416         }
11417         staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
11418                 available_instant_aec_modes, size);
11419         available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
11420     }
11421 
11422     int32_t sharpness_range[] = {
11423             gCamCapability[cameraId]->sharpness_ctrl.min_value,
11424             gCamCapability[cameraId]->sharpness_ctrl.max_value};
11425     staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
11426 
11427     if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
11428         int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
11429         size = 0;
11430         count = CAM_BINNING_CORRECTION_MODE_MAX;
11431         count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
11432         for (size_t i = 0; i < count; i++) {
11433             int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
11434                     METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
11435                     gCamCapability[cameraId]->supported_binning_modes[i]);
11436             if (NAME_NOT_FOUND != val) {
11437                 avail_binning_modes[size] = (int32_t)val;
11438                 size++;
11439             }
11440         }
11441         staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
11442                 avail_binning_modes, size);
11443         available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
11444     }
11445 
11446     if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
11447         int32_t available_aec_modes[CAM_AEC_MODE_MAX];
11448         size = 0;
11449         count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
11450         for (size_t i = 0; i < count; i++) {
11451             int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
11452                     gCamCapability[cameraId]->supported_aec_modes[i]);
11453             if (NAME_NOT_FOUND != val)
11454                 available_aec_modes[size++] = val;
11455         }
11456         staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
11457                 available_aec_modes, size);
11458         available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
11459     }
11460 
11461     if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
11462         int32_t available_iso_modes[CAM_ISO_MODE_MAX];
11463         size = 0;
11464         count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
11465         for (size_t i = 0; i < count; i++) {
11466             int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
11467                     gCamCapability[cameraId]->supported_iso_modes[i]);
11468             if (NAME_NOT_FOUND != val)
11469                 available_iso_modes[size++] = val;
11470         }
11471         staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
11472                 available_iso_modes, size);
11473         available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
11474     }
11475 
11476     int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
11477     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
11478         available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
11479     staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
11480             available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
11481 
11482     int32_t available_saturation_range[4];
11483     available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
11484     available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
11485     available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
11486     available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
11487     staticInfo.update(QCAMERA3_SATURATION_RANGE,
11488             available_saturation_range, 4);
11489 
11490     uint8_t is_hdr_values[2];
11491     is_hdr_values[0] = 0;
11492     is_hdr_values[1] = 1;
11493     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
11494             is_hdr_values, 2);
11495 
11496     float is_hdr_confidence_range[2];
11497     is_hdr_confidence_range[0] = 0.0;
11498     is_hdr_confidence_range[1] = 1.0;
11499     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
11500             is_hdr_confidence_range, 2);
11501 
11502     size_t eepromLength = strnlen(
11503             reinterpret_cast<const char *>(
11504                     gCamCapability[cameraId]->eeprom_version_info),
11505             sizeof(gCamCapability[cameraId]->eeprom_version_info));
11506     if (0 < eepromLength) {
11507         char easelInfo[] = ",E:N";
11508         char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
11509         if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
11510             eepromLength += sizeof(easelInfo);
11511             strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
11512                     gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-Y" : ",E:N"),
11513                     MAX_EEPROM_VERSION_INFO_LEN);
11514         }
11515         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
11516                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11517         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
11518 
11519         staticInfo.update(ANDROID_INFO_VERSION,
11520                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11521         available_characteristics_keys.add(ANDROID_INFO_VERSION);
11522     }
11523 
11524     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
11525                       available_characteristics_keys.array(),
11526                       available_characteristics_keys.size());
11527 
11528     std::vector<uint8_t> availableOisModes;
11529     availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_OFF);
11530     if (cameraId == 0) {
11531         availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_ON);
11532     }
11533 
11534     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
11535                       availableOisModes.data(),
11536                       availableOisModes.size());
11537 
11538     gStaticMetadata[cameraId] = staticInfo.release();
11539     return rc;
11540 }
11541 
11542 /*===========================================================================
11543  * FUNCTION   : makeTable
11544  *
11545  * DESCRIPTION: make a table of sizes
11546  *
11547  * PARAMETERS :
11548  *
11549  *
11550  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)11551 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
11552         size_t max_size, int32_t *sizeTable)
11553 {
11554     size_t j = 0;
11555     if (size > max_size) {
11556        size = max_size;
11557     }
11558     for (size_t i = 0; i < size; i++) {
11559         sizeTable[j] = dimTable[i].width;
11560         sizeTable[j+1] = dimTable[i].height;
11561         j+=2;
11562     }
11563 }
11564 
11565 /*===========================================================================
11566  * FUNCTION   : makeFPSTable
11567  *
11568  * DESCRIPTION: make a table of fps ranges
11569  *
11570  * PARAMETERS :
11571  *
11572  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)11573 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
11574         size_t max_size, int32_t *fpsRangesTable)
11575 {
11576     size_t j = 0;
11577     if (size > max_size) {
11578        size = max_size;
11579     }
11580     for (size_t i = 0; i < size; i++) {
11581         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
11582         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
11583         j+=2;
11584     }
11585 }
11586 
11587 /*===========================================================================
11588  * FUNCTION   : makeOverridesList
11589  *
11590  * DESCRIPTION: make a list of scene mode overrides
11591  *
11592  * PARAMETERS :
11593  *
11594  *
11595  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)11596 void QCamera3HardwareInterface::makeOverridesList(
11597         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
11598         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
11599 {
11600     /*daemon will give a list of overrides for all scene modes.
11601       However we should send the fwk only the overrides for the scene modes
11602       supported by the framework*/
11603     size_t j = 0;
11604     if (size > max_size) {
11605        size = max_size;
11606     }
11607     size_t focus_count = CAM_FOCUS_MODE_MAX;
11608     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
11609             focus_count);
11610     for (size_t i = 0; i < size; i++) {
11611         bool supt = false;
11612         size_t index = supported_indexes[i];
11613         overridesList[j] = gCamCapability[camera_id]->flash_available ?
11614                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
11615         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
11616                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11617                 overridesTable[index].awb_mode);
11618         if (NAME_NOT_FOUND != val) {
11619             overridesList[j+1] = (uint8_t)val;
11620         }
11621         uint8_t focus_override = overridesTable[index].af_mode;
11622         for (size_t k = 0; k < focus_count; k++) {
11623            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
11624               supt = true;
11625               break;
11626            }
11627         }
11628         if (supt) {
11629             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11630                     focus_override);
11631             if (NAME_NOT_FOUND != val) {
11632                 overridesList[j+2] = (uint8_t)val;
11633             }
11634         } else {
11635            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
11636         }
11637         j+=3;
11638     }
11639 }
11640 
11641 /*===========================================================================
11642  * FUNCTION   : filterJpegSizes
11643  *
11644  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
11645  *              could be downscaled to
11646  *
11647  * PARAMETERS :
11648  *
11649  * RETURN     : length of jpegSizes array
11650  *==========================================================================*/
11651 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)11652 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11653         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11654         uint8_t downscale_factor)
11655 {
11656     if (0 == downscale_factor) {
11657         downscale_factor = 1;
11658     }
11659 
11660     int32_t min_width = active_array_size.width / downscale_factor;
11661     int32_t min_height = active_array_size.height / downscale_factor;
11662     size_t jpegSizesCnt = 0;
11663     if (processedSizesCnt > maxCount) {
11664         processedSizesCnt = maxCount;
11665     }
11666     for (size_t i = 0; i < processedSizesCnt; i+=2) {
11667         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11668             jpegSizes[jpegSizesCnt] = processedSizes[i];
11669             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11670             jpegSizesCnt += 2;
11671         }
11672     }
11673     return jpegSizesCnt;
11674 }
11675 
11676 /*===========================================================================
11677  * FUNCTION   : computeNoiseModelEntryS
11678  *
11679  * DESCRIPTION: function to map a given sensitivity to the S noise
11680  *              model parameters in the DNG noise model.
11681  *
11682  * PARAMETERS : sens : the sensor sensitivity
11683  *
11684  * RETURN     : S (sensor amplification) noise
11685  *
11686  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)11687 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11688     double s = gCamCapability[mCameraId]->gradient_S * sens +
11689             gCamCapability[mCameraId]->offset_S;
11690     return ((s < 0.0) ? 0.0 : s);
11691 }
11692 
11693 /*===========================================================================
11694  * FUNCTION   : computeNoiseModelEntryO
11695  *
11696  * DESCRIPTION: function to map a given sensitivity to the O noise
11697  *              model parameters in the DNG noise model.
11698  *
11699  * PARAMETERS : sens : the sensor sensitivity
11700  *
11701  * RETURN     : O (sensor readout) noise
11702  *
11703  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)11704 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11705     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11706     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11707             1.0 : (1.0 * sens / max_analog_sens);
11708     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11709             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11710     return ((o < 0.0) ? 0.0 : o);
11711 }
11712 
11713 /*===========================================================================
11714  * FUNCTION   : getSensorSensitivity
11715  *
11716  * DESCRIPTION: convert iso_mode to an integer value
11717  *
11718  * PARAMETERS : iso_mode : the iso_mode supported by sensor
11719  *
11720  * RETURN     : sensitivity supported by sensor
11721  *
11722  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)11723 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11724 {
11725     int32_t sensitivity;
11726 
11727     switch (iso_mode) {
11728     case CAM_ISO_MODE_100:
11729         sensitivity = 100;
11730         break;
11731     case CAM_ISO_MODE_200:
11732         sensitivity = 200;
11733         break;
11734     case CAM_ISO_MODE_400:
11735         sensitivity = 400;
11736         break;
11737     case CAM_ISO_MODE_800:
11738         sensitivity = 800;
11739         break;
11740     case CAM_ISO_MODE_1600:
11741         sensitivity = 1600;
11742         break;
11743     default:
11744         sensitivity = -1;
11745         break;
11746     }
11747     return sensitivity;
11748 }
11749 
initHdrPlusClientLocked()11750 int QCamera3HardwareInterface::initHdrPlusClientLocked() {
11751     if (gEaselManagerClient == nullptr) {
11752         gEaselManagerClient = EaselManagerClient::create();
11753         if (gEaselManagerClient == nullptr) {
11754             ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11755             return -ENODEV;
11756         }
11757     }
11758 
11759     if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
11760         // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11761         //  to connect to Easel.
11762         bool doNotpowerOnEasel =
11763                 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11764 
11765         if (doNotpowerOnEasel) {
11766             ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11767             return OK;
11768         }
11769 
11770         // If Easel is present, power on Easel and suspend it immediately.
11771         status_t res = gEaselManagerClient->open();
11772         if (res != OK) {
11773             ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11774                     res);
11775             return res;
11776         }
11777 
11778         EaselManagerClientOpened = true;
11779 
11780         res = gEaselManagerClient->suspend();
11781         if (res != OK) {
11782             ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11783         }
11784 
11785         gEaselBypassOnly = property_get_bool("persist.camera.hdrplus.disable", false);
11786         gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
11787 
11788         // Expose enableZsl key only when HDR+ mode is enabled.
11789         gExposeEnableZslKey = !gEaselBypassOnly;
11790     }
11791 
11792     return OK;
11793 }
11794 
11795 /*===========================================================================
11796  * FUNCTION   : isStreamCombinationSupported
11797  *
11798  * DESCRIPTION: query camera support for specific stream combination
11799  *
11800  * PARAMETERS :
11801  *   @cameraId  : camera Id
11802  *   @comb      : stream combination
11803  *
11804  * RETURN     : int type of status
11805  *              NO_ERROR  -- in case combination is supported
 *              non-zero failure code
11807  *==========================================================================*/
isStreamCombinationSupported(uint32_t cameraId,const camera_stream_combination_t * comb)11808 int QCamera3HardwareInterface::isStreamCombinationSupported(uint32_t cameraId,
11809         const camera_stream_combination_t *comb)
11810 {
11811     int rc = BAD_VALUE;
11812     pthread_mutex_lock(&gCamLock);
11813 
11814     if (NULL == gCamCapability[cameraId]) {
11815         rc = initCapabilities(cameraId);
11816         if (rc < 0) {
11817             pthread_mutex_unlock(&gCamLock);
11818             return rc;
11819         }
11820     }
11821 
11822     camera3_stream_configuration_t streamList = {comb->num_streams, /*streams*/ nullptr,
11823             comb->operation_mode, /*session_parameters*/ nullptr};
11824     streamList.streams = new camera3_stream_t * [comb->num_streams];
11825     camera3_stream_t *streamBuffer = new camera3_stream_t[comb->num_streams];
11826     for (size_t i = 0; i < comb->num_streams; i++) {
11827         streamBuffer[i] = {comb->streams[i].stream_type, comb->streams[i].width,
11828             comb->streams[i].height, comb->streams[i].format, comb->streams[i].usage,
11829             /*max_buffers*/ 0, /*priv*/ nullptr, comb->streams[i].data_space,
11830             comb->streams[i].rotation, comb->streams[i].physical_camera_id, /*reserved*/ {nullptr}};
11831         streamList.streams[i] = &streamBuffer[i];
11832     }
11833 
11834     StreamValidateStatus validateStatus;
11835     rc = validateStreamCombination(cameraId, &streamList, &validateStatus);
11836 
11837     delete [] streamBuffer;
11838     delete [] streamList.streams;
11839     pthread_mutex_unlock(&gCamLock);
11840 
11841     return rc;
11842 }
11843 
11844 /*===========================================================================
11845  * FUNCTION   : getCamInfo
11846  *
11847  * DESCRIPTION: query camera capabilities
11848  *
11849  * PARAMETERS :
11850  *   @cameraId  : camera Id
11851  *   @info      : camera info struct to be filled in with camera capabilities
11852  *
11853  * RETURN     : int type of status
11854  *              NO_ERROR  -- success
 *              non-zero failure code
11856  *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock serializes access to the global capability/metadata tables.
    pthread_mutex_lock(&gCamLock);

    {
        // Initialize the HDR+ (Easel) client state under its own lock before
        // any camera info is filled in; failure aborts the whole query.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        rc = initHdrPlusClientLocked();
        if (rc != OK) {
            ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily load per-camera capabilities on first query.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily build the static metadata (depends on capabilities above).
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the vendor sensor position (including aux sensors) onto the
    // framework's two-valued facing field.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: record the error in rc but keep populating the
        // remaining info fields (no early return here).
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
11942 
11943 /*===========================================================================
11944  * FUNCTION   : translateCapabilityToMetadata
11945  *
11946  * DESCRIPTION: translate the capability into camera_metadata_t
11947  *
11948  * PARAMETERS : type of the request
11949  *
11950  *
11951  * RETURN     : success: camera_metadata_t*
11952  *              failure: NULL
11953  *
11954  *==========================================================================*/
translateCapabilityToMetadata(int type)11955 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11956 {
11957     if (mDefaultMetadata[type] != NULL) {
11958         return mDefaultMetadata[type];
11959     }
11960     //first time we are handling this request
11961     //fill up the metadata structure using the wrapper class
11962     CameraMetadata settings;
11963     //translate from cam_capability_t to camera_metadata_tag_t
11964     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11965     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11966     int32_t defaultRequestID = 0;
11967     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11968 
11969     /* OIS disable */
11970     char ois_prop[PROPERTY_VALUE_MAX];
11971     memset(ois_prop, 0, sizeof(ois_prop));
11972     property_get("persist.camera.ois.disable", ois_prop, "0");
11973     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11974 
11975     /* Force video to use OIS */
11976     char videoOisProp[PROPERTY_VALUE_MAX];
11977     memset(videoOisProp, 0, sizeof(videoOisProp));
11978     property_get("persist.camera.ois.video", videoOisProp, "1");
11979     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
11980 
11981     // Hybrid AE enable/disable
11982     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11983     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11984     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11985     uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11986 
11987     uint8_t controlIntent = 0;
11988     uint8_t focusMode;
11989     uint8_t vsMode;
11990     uint8_t optStabMode;
11991     uint8_t cacMode;
11992     uint8_t edge_mode;
11993     uint8_t noise_red_mode;
11994     uint8_t shading_mode;
11995     uint8_t hot_pixel_mode;
11996     uint8_t tonemap_mode;
11997     bool highQualityModeEntryAvailable = FALSE;
11998     bool fastModeEntryAvailable = FALSE;
11999     uint8_t histogramEnable = false;
12000     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
12001     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12002     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
12003     uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
12004     uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
12005 
12006     switch (type) {
12007       case CAMERA3_TEMPLATE_PREVIEW:
12008         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
12009         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
12010         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12011         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12012         edge_mode = ANDROID_EDGE_MODE_FAST;
12013         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12014         shading_mode = ANDROID_SHADING_MODE_FAST;
12015         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12016         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12017         break;
12018       case CAMERA3_TEMPLATE_STILL_CAPTURE:
12019         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
12020         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
12021         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12022         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
12023         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
12024         shading_mode = ANDROID_SHADING_MODE_HIGH_QUALITY;
12025         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
12026         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
12027         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
12028         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
12029         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12030             if (gCamCapability[mCameraId]->aberration_modes[i] ==
12031                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12032                 highQualityModeEntryAvailable = TRUE;
12033             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
12034                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12035                 fastModeEntryAvailable = TRUE;
12036             }
12037         }
12038         if (highQualityModeEntryAvailable) {
12039             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
12040         } else if (fastModeEntryAvailable) {
12041             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12042         }
12043         if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
12044             shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
12045         }
12046         enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
12047         break;
12048       case CAMERA3_TEMPLATE_VIDEO_RECORD:
12049         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
12050         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
12051         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12052         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12053         edge_mode = ANDROID_EDGE_MODE_FAST;
12054         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12055         shading_mode = ANDROID_SHADING_MODE_FAST;
12056         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12057         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12058         if (forceVideoOis)
12059             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12060         break;
12061       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
12062         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
12063         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
12064         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12065         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12066         edge_mode = ANDROID_EDGE_MODE_FAST;
12067         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12068         shading_mode = ANDROID_SHADING_MODE_FAST;
12069         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12070         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12071         if (forceVideoOis)
12072             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12073         break;
12074       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
12075         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
12076         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
12077         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12078         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12079         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
12080         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
12081         shading_mode = ANDROID_SHADING_MODE_FAST;
12082         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12083         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12084         break;
12085       case CAMERA3_TEMPLATE_MANUAL:
12086         edge_mode = ANDROID_EDGE_MODE_FAST;
12087         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12088         shading_mode = ANDROID_SHADING_MODE_FAST;
12089         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12090         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12091         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12092         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
12093         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
12094         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12095         break;
12096       default:
12097         edge_mode = ANDROID_EDGE_MODE_FAST;
12098         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12099         shading_mode = ANDROID_SHADING_MODE_FAST;
12100         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12101         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12102         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12103         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
12104         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
12105         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12106         break;
12107     }
12108     // Set CAC to OFF if underlying device doesn't support
12109     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12110         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
12111     }
12112     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
12113     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
12114     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
12115     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
12116         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
12117     }
12118     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
12119     settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
12120     settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
12121 
12122     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
12123             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
12124         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12125     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
12126             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
12127             || ois_disable)
12128         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12129     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
12130     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
12131 
12132     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
12133             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
12134 
12135     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
12136     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
12137 
12138     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
12139     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
12140 
12141     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
12142     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
12143 
12144     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
12145     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
12146 
12147     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
12148     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
12149 
12150     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
12151     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
12152 
12153     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
12154     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
12155 
12156     /*flash*/
12157     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
12158     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
12159 
12160     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
12161     settings.update(ANDROID_FLASH_FIRING_POWER,
12162             &flashFiringLevel, 1);
12163 
12164     /* lens */
12165     float default_aperture = gCamCapability[mCameraId]->apertures[0];
12166     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
12167 
12168     if (gCamCapability[mCameraId]->filter_densities_count) {
12169         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
12170         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
12171                         gCamCapability[mCameraId]->filter_densities_count);
12172     }
12173 
12174     float default_focal_length = gCamCapability[mCameraId]->focal_length;
12175     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
12176 
12177     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
12178     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
12179 
12180     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
12181     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
12182 
12183     /* face detection (default to OFF) */
12184     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
12185     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
12186 
12187     static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
12188     settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
12189 
12190     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
12191     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
12192 
12193     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
12194     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
12195 
12196 
12197     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
12198     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
12199 
12200     /* Exposure time(Update the Min Exposure Time)*/
12201     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
12202     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
12203 
12204     /* frame duration */
12205     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
12206     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
12207 
12208     /* sensitivity */
12209     static const int32_t default_sensitivity = 100;
12210     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
12211 #ifndef USE_HAL_3_3
12212     static const int32_t default_isp_sensitivity =
12213             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12214     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
12215 #endif
12216 
12217     /*edge mode*/
12218     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
12219 
12220     /*noise reduction mode*/
12221     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
12222 
12223     /*shading mode*/
12224     settings.update(ANDROID_SHADING_MODE, &shading_mode, 1);
12225 
12226     /*hot pixel mode*/
12227     settings.update(ANDROID_HOT_PIXEL_MODE, &hot_pixel_mode, 1);
12228 
12229     /*color correction mode*/
12230     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
12231     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
12232 
12233     /*transform matrix mode*/
12234     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
12235 
12236     int32_t scaler_crop_region[4];
12237     scaler_crop_region[0] = 0;
12238     scaler_crop_region[1] = 0;
12239     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
12240     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
12241     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
12242 
12243     float zoom_ratio = 1.0f;
12244     settings.update(ANDROID_CONTROL_ZOOM_RATIO, &zoom_ratio, 1);
12245 
12246     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
12247     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
12248 
12249     /*focus distance*/
12250     float focus_distance = 0.0;
12251     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
12252 
12253     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
12254     /* Restrict template max_fps to 30 */
12255     float max_range = 0.0;
12256     float max_fixed_fps = 0.0;
12257     int32_t fps_range[2] = {0, 0};
12258     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
12259             i++) {
12260         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
12261                 TEMPLATE_MAX_PREVIEW_FPS) {
12262             continue;
12263         }
12264         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
12265             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
12266         if (type == CAMERA3_TEMPLATE_PREVIEW ||
12267                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
12268                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
12269             if (range > max_range) {
12270                 fps_range[0] =
12271                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
12272                 fps_range[1] =
12273                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
12274                 max_range = range;
12275             }
12276         } else {
12277             if (range < 0.01 && max_fixed_fps <
12278                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
12279                 fps_range[0] =
12280                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
12281                 fps_range[1] =
12282                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
12283                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
12284             }
12285         }
12286     }
12287     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
12288 
12289     /*precapture trigger*/
12290     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
12291     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
12292 
12293     /*af trigger*/
12294     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
12295     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
12296 
12297     /* ae & af regions */
12298     int32_t active_region[] = {
12299             gCamCapability[mCameraId]->active_array_size.left,
12300             gCamCapability[mCameraId]->active_array_size.top,
12301             gCamCapability[mCameraId]->active_array_size.left +
12302                     gCamCapability[mCameraId]->active_array_size.width,
12303             gCamCapability[mCameraId]->active_array_size.top +
12304                     gCamCapability[mCameraId]->active_array_size.height,
12305             0};
12306     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
12307             sizeof(active_region) / sizeof(active_region[0]));
12308     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
12309             sizeof(active_region) / sizeof(active_region[0]));
12310 
12311     /* black level lock */
12312     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
12313     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
12314 
12315     //special defaults for manual template
12316     if (type == CAMERA3_TEMPLATE_MANUAL) {
12317         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
12318         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
12319 
12320         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
12321         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
12322 
12323         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
12324         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
12325 
12326         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
12327         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
12328 
12329         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
12330         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
12331 
12332         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
12333         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
12334     }
12335 
12336 
12337     /* TNR
12338      * We'll use this location to determine which modes TNR will be set.
12339      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
12340      * This is not to be confused with linking on a per stream basis that decision
12341      * is still on per-session basis and will be handled as part of config stream
12342      */
12343     uint8_t tnr_enable = 0;
12344 
12345     if (m_bTnrPreview || m_bTnrVideo) {
12346 
12347         switch (type) {
12348             case CAMERA3_TEMPLATE_VIDEO_RECORD:
12349                     tnr_enable = 1;
12350                     break;
12351 
12352             default:
12353                     tnr_enable = 0;
12354                     break;
12355         }
12356 
12357         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
12358         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
12359         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
12360 
12361         LOGD("TNR:%d with process plate %d for template:%d",
12362                              tnr_enable, tnr_process_type, type);
12363     }
12364 
12365     //Update Link tags to default
12366     uint8_t sync_type = CAM_TYPE_STANDALONE;
12367     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
12368 
12369     uint8_t is_main = 1;
12370     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
12371 
12372     uint8_t related_camera_id = mCameraId;
12373     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
12374 
12375     /* CDS default */
12376     char prop[PROPERTY_VALUE_MAX];
12377     memset(prop, 0, sizeof(prop));
12378     property_get("persist.camera.CDS", prop, "Auto");
12379     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
12380     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
12381     if (CAM_CDS_MODE_MAX == cds_mode) {
12382         cds_mode = CAM_CDS_MODE_AUTO;
12383     }
12384 
12385     /* Disabling CDS in templates which have TNR enabled*/
12386     if (tnr_enable)
12387         cds_mode = CAM_CDS_MODE_OFF;
12388 
12389     int32_t mode = cds_mode;
12390     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
12391 
12392     /* Manual Convergence AEC Speed is disabled by default*/
12393     float default_aec_speed = 0;
12394     settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
12395 
12396     /* Manual Convergence AWB Speed is disabled by default*/
12397     float default_awb_speed = 0;
12398     settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
12399 
12400     // Set instant AEC to normal convergence by default
12401     uint8_t instant_aec_mode = (uint8_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
12402     settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
12403 
12404     uint8_t oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_OFF;
12405     if (mCameraId == 0) {
12406         oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_ON;
12407     }
12408     settings.update(ANDROID_STATISTICS_OIS_DATA_MODE, &oisDataMode, 1);
12409 
12410     if (gExposeEnableZslKey) {
12411         settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
12412         int32_t postview = 0;
12413         settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
12414         int32_t continuousZslCapture = 0;
12415         settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
12416         // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
12417         // CAMERA3_TEMPLATE_PREVIEW.
12418         int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
12419                                   type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
12420         settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
12421 
12422         // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
12423         // hybrid ae is enabled for 3rd party app HDR+.
12424         if (type == CAMERA3_TEMPLATE_PREVIEW ||
12425                 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
12426             hybrid_ae = 1;
12427         }
12428     }
12429     /* hybrid ae */
12430     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
12431 
12432     int32_t fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
12433     settings.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
12434 
12435     mDefaultMetadata[type] = settings.release();
12436 
12437     return mDefaultMetadata[type];
12438 }
12439 
12440 /*===========================================================================
12441  * FUNCTION   : getExpectedFrameDuration
12442  *
12443  * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
12444  *              duration
12445  *
12446  * PARAMETERS :
12447  *   @request   : request settings
12448  *   @frameDuration : The maximum frame duration in nanoseconds
12449  *
12450  * RETURN     : None
12451  *==========================================================================*/
getExpectedFrameDuration(const camera_metadata_t * request,nsecs_t * frameDuration)12452 void QCamera3HardwareInterface::getExpectedFrameDuration(
12453         const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
12454     if (nullptr == frameDuration) {
12455         return;
12456     }
12457 
12458     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12459     find_camera_metadata_ro_entry(request,
12460             ANDROID_SENSOR_EXPOSURE_TIME,
12461             &e);
12462     if (e.count > 0) {
12463         *frameDuration = e.data.i64[0];
12464     }
12465     find_camera_metadata_ro_entry(request,
12466             ANDROID_SENSOR_FRAME_DURATION,
12467             &e);
12468     if (e.count > 0) {
12469         *frameDuration = std::max(e.data.i64[0], *frameDuration);
12470     }
12471 }
12472 
12473 /*===========================================================================
12474  * FUNCTION   : calculateMaxExpectedDuration
12475  *
12476  * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
12477  *              current camera settings.
12478  *
12479  * PARAMETERS :
12480  *   @request   : request settings
12481  *
12482  * RETURN     : Expected frame duration in nanoseconds.
12483  *==========================================================================*/
calculateMaxExpectedDuration(const camera_metadata_t * request)12484 nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
12485         const camera_metadata_t *request) {
12486     nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
12487     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12488     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
12489     if (e.count == 0) {
12490         return maxExpectedDuration;
12491     }
12492 
12493     if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
12494         getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12495     }
12496 
12497     if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
12498         return maxExpectedDuration;
12499     }
12500 
12501     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
12502     if (e.count == 0) {
12503         return maxExpectedDuration;
12504     }
12505 
12506     switch (e.data.u8[0]) {
12507         case ANDROID_CONTROL_AE_MODE_OFF:
12508             getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12509             break;
12510         default:
12511             find_camera_metadata_ro_entry(request,
12512                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
12513                     &e);
12514             if (e.count > 1) {
12515                 maxExpectedDuration = 1e9 / e.data.u8[0];
12516             }
12517             break;
12518     }
12519 
12520     return maxExpectedDuration;
12521 }
12522 
12523 /*===========================================================================
12524  * FUNCTION   : setFrameParameters
12525  *
12526  * DESCRIPTION: set parameters per frame as requested in the metadata from
12527  *              framework
12528  *
12529  * PARAMETERS :
12530  *   @request   : request that needs to be serviced
12531  *   @streamsArray : Stream ID of all the requested streams
12532  *   @blob_request: Whether this request is a blob request or not
12533  *
12534  * RETURN     : success: NO_ERROR
12535  *              failure:
12536  *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
                    camera3_capture_request_t *request,
                    cam_stream_ID_t streamsArray,
                    int blob_request,
                    uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    // Start every frame from an empty parameter batch; order of the
    // ADD_SET_PARAM_ENTRY_TO_BATCH calls below matters for the backend.
    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        LOGE("Failed to set hal version in the parameters");
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
        LOGE("Failed to set stream type mask in the parameters");
        return BAD_VALUE;
    }

    // One-shot: ask the backend to re-read the debug property, then drop the
    // flag so subsequent frames don't repeat the request.
    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelavent. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
            return BAD_VALUE;
        }
        mUpdateDebugLevel = false;
    }

    // Requests without settings reuse the previously translated parameters
    // (mParameters already holds the per-frame entries set above).
    if(request->settings != NULL){
        // Cache the expected frame duration for timeout bookkeeping.
        mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        // Snapshot requests keep a copy so reprocess can refer back to the
        // parameters that produced the capture.
        if (blob_request)
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
12587 
12588 /*===========================================================================
12589  * FUNCTION   : setReprocParameters
12590  *
12591  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
12592  *              return it.
12593  *
12594  * PARAMETERS :
12595  *   @request   : request that needs to be serviced
12596  *
12597  * RETURN     : success: NO_ERROR
12598  *              failure:
12599  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)12600 int32_t QCamera3HardwareInterface::setReprocParameters(
12601         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
12602         uint32_t snapshotStreamId)
12603 {
12604     /*translate from camera_metadata_t type to parm_type_t*/
12605     int rc = 0;
12606 
12607     if (NULL == request->settings){
12608         LOGE("Reprocess settings cannot be NULL");
12609         return BAD_VALUE;
12610     }
12611 
12612     if (NULL == reprocParam) {
12613         LOGE("Invalid reprocessing metadata buffer");
12614         return BAD_VALUE;
12615     }
12616     clear_metadata_buffer(reprocParam);
12617 
12618     /*we need to update the frame number in the parameters*/
12619     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
12620             request->frame_number)) {
12621         LOGE("Failed to set the frame number in the parameters");
12622         return BAD_VALUE;
12623     }
12624 
12625     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
12626     if (rc < 0) {
12627         LOGE("Failed to translate reproc request");
12628         return rc;
12629     }
12630 
12631     CameraMetadata frame_settings;
12632     frame_settings = request->settings;
12633     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
12634             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
12635         int32_t *crop_count =
12636                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
12637         int32_t *crop_data =
12638                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
12639         int32_t *roi_map =
12640                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
12641         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
12642             cam_crop_data_t crop_meta;
12643             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
12644             crop_meta.num_of_streams = 1;
12645             crop_meta.crop_info[0].crop.left   = crop_data[0];
12646             crop_meta.crop_info[0].crop.top    = crop_data[1];
12647             crop_meta.crop_info[0].crop.width  = crop_data[2];
12648             crop_meta.crop_info[0].crop.height = crop_data[3];
12649 
12650             crop_meta.crop_info[0].roi_map.left =
12651                     roi_map[0];
12652             crop_meta.crop_info[0].roi_map.top =
12653                     roi_map[1];
12654             crop_meta.crop_info[0].roi_map.width =
12655                     roi_map[2];
12656             crop_meta.crop_info[0].roi_map.height =
12657                     roi_map[3];
12658 
12659             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
12660                 rc = BAD_VALUE;
12661             }
12662             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
12663                     request->input_buffer->stream,
12664                     crop_meta.crop_info[0].crop.left,
12665                     crop_meta.crop_info[0].crop.top,
12666                     crop_meta.crop_info[0].crop.width,
12667                     crop_meta.crop_info[0].crop.height);
12668             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
12669                     request->input_buffer->stream,
12670                     crop_meta.crop_info[0].roi_map.left,
12671                     crop_meta.crop_info[0].roi_map.top,
12672                     crop_meta.crop_info[0].roi_map.width,
12673                     crop_meta.crop_info[0].roi_map.height);
12674             } else {
12675                 LOGE("Invalid reprocess crop count %d!", *crop_count);
12676             }
12677     } else {
12678         LOGE("No crop data from matching output stream");
12679     }
12680 
12681     /* These settings are not needed for regular requests so handle them specially for
12682        reprocess requests; information needed for EXIF tags */
12683     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12684         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12685                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12686         if (NAME_NOT_FOUND != val) {
12687             uint32_t flashMode = (uint32_t)val;
12688             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
12689                 rc = BAD_VALUE;
12690             }
12691         } else {
12692             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
12693                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12694         }
12695     } else {
12696         LOGH("No flash mode in reprocess settings");
12697     }
12698 
12699     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
12700         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
12701         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
12702             rc = BAD_VALUE;
12703         }
12704     } else {
12705         LOGH("No flash state in reprocess settings");
12706     }
12707 
12708     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
12709         uint8_t *reprocessFlags =
12710             frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
12711         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
12712                 *reprocessFlags)) {
12713                 rc = BAD_VALUE;
12714         }
12715     }
12716 
12717     // Add exif debug data to internal metadata
12718     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
12719         mm_jpeg_debug_exif_params_t *debug_params =
12720                 (mm_jpeg_debug_exif_params_t *)frame_settings.find
12721                 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12722         // AE
12723         if (debug_params->ae_debug_params_valid == TRUE) {
12724             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12725                     debug_params->ae_debug_params);
12726         }
12727         // AWB
12728         if (debug_params->awb_debug_params_valid == TRUE) {
12729             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12730                 debug_params->awb_debug_params);
12731         }
12732         // AF
12733        if (debug_params->af_debug_params_valid == TRUE) {
12734             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12735                    debug_params->af_debug_params);
12736         }
12737         // ASD
12738         if (debug_params->asd_debug_params_valid == TRUE) {
12739             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12740                     debug_params->asd_debug_params);
12741         }
12742         // Stats
12743         if (debug_params->stats_debug_params_valid == TRUE) {
12744             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12745                     debug_params->stats_debug_params);
12746        }
12747         // BE Stats
12748         if (debug_params->bestats_debug_params_valid == TRUE) {
12749             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12750                     debug_params->bestats_debug_params);
12751         }
12752         // BHIST
12753         if (debug_params->bhist_debug_params_valid == TRUE) {
12754             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12755                     debug_params->bhist_debug_params);
12756        }
12757         // 3A Tuning
12758         if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12759             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12760                     debug_params->q3a_tuning_debug_params);
12761         }
12762     }
12763 
12764     // Add metadata which reprocess needs
12765     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12766         cam_reprocess_info_t *repro_info =
12767                 (cam_reprocess_info_t *)frame_settings.find
12768                 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
12769         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
12770                 repro_info->sensor_crop_info);
12771         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
12772                 repro_info->camif_crop_info);
12773         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
12774                 repro_info->isp_crop_info);
12775         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
12776                 repro_info->cpp_crop_info);
12777         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
12778                 repro_info->af_focal_length_ratio);
12779         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
12780                 repro_info->pipeline_flip);
12781         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12782                 repro_info->af_roi);
12783         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12784                 repro_info->dyn_mask);
12785         /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
12786            CAM_INTF_PARM_ROTATION metadata then has been added in
12787            translateToHalMetadata. HAL need to keep this new rotation
12788            metadata. Otherwise, the old rotation info saved in the vendor tag
12789            would be used */
12790         IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12791                 CAM_INTF_PARM_ROTATION, reprocParam) {
12792             LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12793         } else {
12794             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
12795                     repro_info->rotation_info);
12796         }
12797     }
12798 
12799     /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
12800        to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
12801        roi.width and roi.height would be the final JPEG size.
12802        For now, HAL only checks this for reprocess request */
12803     if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12804             frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12805         uint8_t *enable =
12806             frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12807         if (*enable == TRUE) {
12808             int32_t *crop_data =
12809                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12810             cam_stream_crop_info_t crop_meta;
12811             memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12812             crop_meta.stream_id = 0;
12813             crop_meta.crop.left   = crop_data[0];
12814             crop_meta.crop.top    = crop_data[1];
12815             crop_meta.crop.width  = crop_data[2];
12816             crop_meta.crop.height = crop_data[3];
12817             // The JPEG crop roi should match cpp output size
12818             IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12819                     CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12820                 crop_meta.roi_map.left = 0;
12821                 crop_meta.roi_map.top = 0;
12822                 crop_meta.roi_map.width = cpp_crop->crop.width;
12823                 crop_meta.roi_map.height = cpp_crop->crop.height;
12824             }
12825             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12826                     crop_meta);
12827             LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
12828                     crop_meta.crop.left, crop_meta.crop.top,
12829                     crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12830             LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
12831                     crop_meta.roi_map.left, crop_meta.roi_map.top,
12832                     crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12833 
12834             // Add JPEG scale information
12835             cam_dimension_t scale_dim;
12836             memset(&scale_dim, 0, sizeof(cam_dimension_t));
12837             if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12838                 int32_t *roi =
12839                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12840                 scale_dim.width = roi[2];
12841                 scale_dim.height = roi[3];
12842                 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12843                     scale_dim);
12844                 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12845                     scale_dim.width, scale_dim.height, mCameraId);
12846             }
12847         }
12848     }
12849 
12850     return rc;
12851 }
12852 
12853 /*===========================================================================
12854  * FUNCTION   : saveRequestSettings
12855  *
12856  * DESCRIPTION: Add any settings that might have changed to the request settings
12857  *              and save the settings to be applied on the frame
12858  *
12859  * PARAMETERS :
12860  *   @jpegMetadata : the extracted and/or modified jpeg metadata
12861  *   @request      : request with initial settings
12862  *
12863  * RETURN     :
12864  * camera_metadata_t* : pointer to the saved request settings
12865  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)12866 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12867         const CameraMetadata &jpegMetadata,
12868         camera3_capture_request_t *request)
12869 {
12870     camera_metadata_t *resultMetadata;
12871     CameraMetadata camMetadata;
12872     camMetadata = request->settings;
12873 
12874     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12875         int32_t thumbnail_size[2];
12876         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12877         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12878         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12879                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12880     }
12881 
12882     if (request->input_buffer != NULL) {
12883         uint8_t reprocessFlags = 1;
12884         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12885                 (uint8_t*)&reprocessFlags,
12886                 sizeof(reprocessFlags));
12887     }
12888 
12889     resultMetadata = camMetadata.release();
12890     return resultMetadata;
12891 }
12892 
12893 /*===========================================================================
12894  * FUNCTION   : setHalFpsRange
12895  *
12896  * DESCRIPTION: set FPS range parameter
12897  *
12898  *
12899  * PARAMETERS :
12900  *   @settings    : Metadata from framework
12901  *   @hal_metadata: Metadata buffer
12902  *
12903  *
12904  * RETURN     : success: NO_ERROR
12905  *              failure:
12906  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // `settings` — callers are expected to check exists() first; confirm.
    cam_fps_range_t fps_range;
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset; recomputed below only when a supported HFR mode is selected.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor always runs at the max
        // fps, so clamp the min fps up to it (per the table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto a discrete HFR mode supported by
        // the backend (NAME_NOT_FOUND means unsupported fps).
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames to bring preview down to
                // PREVIEW_FPS_FOR_HFR, capped at the backend maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
13000 
13001 /*===========================================================================
13002  * FUNCTION   : translateToHalMetadata
13003  *
13004  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
13005  *
13006  *
13007  * PARAMETERS :
13008  *   @request  : request sent from framework
13009  *   @hal_metadata: Hal specific metadata buffer
13010  *   @snapshotStreamId: Snapshot stream ID.
13011  *
13012  * RETURN     : success: NO_ERROR
13013  *              failure:
13014  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)13015 int QCamera3HardwareInterface::translateToHalMetadata
13016                                   (const camera3_capture_request_t *request,
13017                                    metadata_buffer_t *hal_metadata,
13018                                    uint32_t snapshotStreamId) {
13019     if (request == nullptr || hal_metadata == nullptr) {
13020         return BAD_VALUE;
13021     }
13022 
13023     int64_t minFrameDuration = getMinFrameDuration(request);
13024 
13025     return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
13026             minFrameDuration);
13027 }
13028 
translateFwkMetadataToHalMetadata(const camera_metadata_t * frameworkMetadata,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId,int64_t minFrameDuration)13029 int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
13030         const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
13031         uint32_t snapshotStreamId, int64_t minFrameDuration) {
13032 
13033     int rc = 0;
13034     CameraMetadata frame_settings;
13035     frame_settings = frameworkMetadata;
13036 
13037     /* Do not change the order of the following list unless you know what you are
13038      * doing.
13039      * The order is laid out in such a way that parameters in the front of the table
13040      * may be used to override the parameters later in the table. Examples are:
13041      * 1. META_MODE should precede AEC/AWB/AF MODE
13042      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
13043      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
13044      * 4. Any mode should precede it's corresponding settings
13045      */
13046     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
13047         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
13048         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
13049             rc = BAD_VALUE;
13050         }
13051         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
13052         if (rc != NO_ERROR) {
13053             LOGE("extractSceneMode failed");
13054         }
13055     }
13056 
13057     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
13058         uint8_t fwk_aeMode =
13059             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
13060         uint8_t aeMode;
13061         int32_t redeye;
13062 
13063         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
13064             aeMode = CAM_AE_MODE_OFF;
13065         } else if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH) {
13066             aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
13067         } else {
13068             aeMode = CAM_AE_MODE_ON;
13069         }
13070         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
13071             redeye = 1;
13072         } else {
13073             redeye = 0;
13074         }
13075 
13076         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
13077                 fwk_aeMode);
13078         if (NAME_NOT_FOUND != val) {
13079             int32_t flashMode = (int32_t)val;
13080             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
13081         }
13082 
13083         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
13084         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
13085             rc = BAD_VALUE;
13086         }
13087     }
13088 
    // White balance: direct table lookup from framework AWB mode to HAL value.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        if (NAME_NOT_FOUND != val) {
            uint8_t whiteLevel = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Chromatic aberration correction: map the framework mode, then verify the
    // result against the device's advertised aberration_modes capability list,
    // downgrading to a supported mode when necessary.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        uint8_t fwk_cacMode =
                frame_settings.find(
                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                fwk_cacMode);
        if (NAME_NOT_FOUND != val) {
            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
            bool entryAvailable = FALSE;
            // Check whether Frameworks set CAC mode is supported in device or not
            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
                    entryAvailable = TRUE;
                    break;
                }
            }
            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
            // If entry not found then set the device supported mode instead of frameworks mode i.e,
            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
            if (entryAvailable == FALSE) {
                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                } else {
                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                        // High is not supported and so set the FAST as spec say's underlying
                        // device implementation can be the same for both modes.
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
                        // in order to avoid the fps drop due to high quality
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    } else {
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    }
                }
            }
            LOGD("Final cacMode is %d", cacMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
        }
    }
13146 
    // Focus mode. When m_bForceInfinityAf is set, the framework's AF mode is
    // ignored and focus is pinned at infinity; otherwise the framework mode is
    // mapped through FOCUS_MODES_MAP. fwk_focusMode is kept at function scope
    // because the manual-focus-distance block below depends on it.
    uint8_t fwk_focusMode = 0;
    if (m_bForceInfinityAf == 0) {
        if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
            fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
            int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    fwk_focusMode);
            if (NAME_NOT_FOUND != val) {
                uint8_t focusMode = (uint8_t)val;
                LOGD("set focus mode %d", focusMode);
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                         CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                    rc = BAD_VALUE;
                }
            }
        } else {
            // AF mode is a required key for every request.
            LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
        }
    } else {
        uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
        LOGE("Focus forced to infinity %d", focusMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
            rc = BAD_VALUE;
        }
    }

    // Manual focus distance is only honored when AF is OFF (manual focus).
    // Note: when m_bForceInfinityAf is set, fwk_focusMode stays 0, which equals
    // ANDROID_CONTROL_AF_MODE_OFF, so this branch may still run in that case.
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
            fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // Antibanding: AUTO is resolved to a concrete 50Hz/60Hz auto mode based on
    // the detected power-line zone (m60HzZone).
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
                if (m60HzZone) {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
                } else {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }
13201 
    // Exposure compensation, clamped to the device's advertised min/max range.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        LOGD("Setting compensation:%d", expCompensation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // AE lock: framework boolean passed straight through.
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // Target FPS range: delegated to setHalFpsRange().
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }

    // AWB lock: framework boolean passed straight through.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }

    // Color effect (mono, sepia, ...), via EFFECT_MODES_MAP lookup.
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Color correction mode, forwarded as-is.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Manual WB gains: copy the CC_GAIN_MAX float gains into the HAL struct.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAIN_MAX; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction transform: the framework's flat array of rationals is
    // unpacked row-major into the CC_MATRIX_ROWS x CC_MATRIX_COLS HAL matrix.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }
13287 
    // AE precapture trigger. Defaults to IDLE/-1; only forwarded when both the
    // trigger and its id are present in the request.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }
13319 
    // Demosaic mode. The framework tag is read as u8 but stored into an
    // int32_t HAL parameter.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge mode + sharpness strength. OFF forces sharpness 0; otherwise the
    // device default is used unless the vendor tag supplies an in-range value.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];

        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness =
                    gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
                int32_t sharpness =
                        frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
                if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
                    sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
                    LOGD("Setting edge mode sharpness %d", sharpness);
                    edge_application.sharpness = sharpness;
                }
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }

    // Flash mode. android.flash.mode is only authoritative when the AE mode is
    // not one of the ON_*_FLASH modes; otherwise AE controls the flash and the
    // explicit flash mode is ignored for the LED parameter.
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        uint32_t flashMode = (uint32_t)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_MODE, flashMode)) {
            rc = BAD_VALUE;
        }

        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            // NOTE(review): this may overwrite the CAM_INTF_PARM_LED_MODE set
            // earlier from the AE mode mapping — confirm precedence is intended.
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t ledMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, ledMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    // Flash state, firing power and firing time: forwarded verbatim.
    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_STATE, flashState)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }
13402 
    // Hot pixel correction mode, forwarded verbatim.
    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    // Lens aperture, filter density and focal length: forwarded verbatim.
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    // Optical image stabilization mode, forwarded verbatim.
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    // Electronic (video) stabilization mode.
    // NOTE(review): this writes into mParameters while every sibling setting in
    // this function writes into hal_metadata — confirm the divergence is
    // intentional (EIS may need to reach the session-level parameter batch).
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }
13453 
13454 
    // Noise reduction mode, forwarded verbatim.
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // Effective exposure factor for reprocess (e.g. flash-strength scaling).
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }
13471 
    // Scaler crop region. The framework crop (active-array coordinates) is
    // remapped to sensor-output coordinates via mCropRegionMapper; the
    // requested zoom ratio, if present, is clamped to [1.0, max_zoom] and
    // cached in mLastRequestedZoomRatio for use here and by the ROI blocks
    // below. scalerCropSet gates the ROI-reset checks further down.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        if (frame_settings.exists(ANDROID_CONTROL_ZOOM_RATIO)) {
            mLastRequestedZoomRatio = frame_settings.find(ANDROID_CONTROL_ZOOM_RATIO).data.f[0];
            mLastRequestedZoomRatio = MIN(MAX(mLastRequestedZoomRatio, 1.0f),
                    gCamCapability[mCameraId]->max_zoom);
            LOGD("setting zoomRatio %f", mLastRequestedZoomRatio);
        }

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height, mLastRequestedZoomRatio);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    // Manual exposure time, forwarded verbatim.
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration, clamped to [minFrameDuration, device max].
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor sensitivity (ISO), clamped to the advertised sensitivity range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }
13533 
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost (ISP digital gain), HAL3.4+ only; clamped to
    // the device's isp_sensitivity_range.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }
#endif
13556 
    // Lens shading correction mode, forwarded verbatim.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detection mode, via FACEDETECT_MODES_MAP lookup.
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Histogram statistics mode (vendor tag), forwarded verbatim.
    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // Sharpness-map statistics mode, forwarded verbatim.
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // Tonemap mode, forwarded verbatim (curve data handled in the next block).
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    // Each framework curve is a flat array of (in, out) float pairs, so the
    // point count is the array length divided by 2; the count is capped at
    // CAM_MAX_TONEMAP_CURVE_SIZE to protect the fixed-size HAL struct.
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
13660 
    // Capture intent, black-level lock and lens-shading-map mode: verbatim.
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE metering region: converted from framework region format, remapped to
    // sensor coordinates (using the zoom ratio cached above), then validated
    // against the crop region when one was set this request. `reset` being
    // false means the ROI was rejected and the HAL entry is not written.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height, mLastRequestedZoomRatio);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }

        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF metering region: same pipeline as the AE region above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height, mLastRequestedZoomRatio);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // CDS for non-HFR non-video mode
    // Vendor-tag CDS mode is range-checked before being forwarded.
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }
13734 
13735     // Video HDR
13736     cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
13737     if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
13738         vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
13739     }
13740     if (m_bVideoHdrEnabled)
13741         vhdr = CAM_VIDEO_HDR_MODE_ON;
13742 
13743     int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
13744 
13745     if(vhdr != curr_hdr_state)
13746         LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
13747 
13748     rc = setVideoHdrMode(mParameters, vhdr);
13749     if (rc != NO_ERROR) {
13750         LOGE("setVideoHDR is failed");
13751     }
13752 
13753     //IR
13754     if(frame_settings.exists(QCAMERA3_IR_MODE)) {
13755         cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
13756                 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
13757         uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
13758         uint8_t isIRon = 0;
13759 
13760         (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
13761         if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
13762             LOGE("Invalid IR mode %d!", fwk_ir);
13763         } else {
13764             if(isIRon != curr_ir_state )
13765                LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
13766 
13767             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13768                     CAM_INTF_META_IR_MODE, fwk_ir)) {
13769                 rc = BAD_VALUE;
13770             }
13771         }
13772     }
13773 
13774     //Binning Correction Mode
13775     if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
13776         cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
13777                 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
13778         if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
13779                 || (0 > fwk_binning_correction)) {
13780             LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
13781         } else {
13782             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13783                     CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
13784                 rc = BAD_VALUE;
13785             }
13786         }
13787     }
13788 
13789     if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
13790         float aec_speed;
13791         aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
13792         LOGD("AEC Speed :%f", aec_speed);
13793         if ( aec_speed < 0 ) {
13794             LOGE("Invalid AEC mode %f!", aec_speed);
13795         } else {
13796             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
13797                     aec_speed)) {
13798                 rc = BAD_VALUE;
13799             }
13800         }
13801     }
13802 
13803     if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
13804         float awb_speed;
13805         awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
13806         LOGD("AWB Speed :%f", awb_speed);
13807         if ( awb_speed < 0 ) {
13808             LOGE("Invalid AWB mode %f!", awb_speed);
13809         } else {
13810             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13811                     awb_speed)) {
13812                 rc = BAD_VALUE;
13813             }
13814         }
13815     }
13816 
13817     // TNR
13818     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13819         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13820         uint8_t b_TnrRequested = 0;
13821         uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
13822         cam_denoise_param_t tnr;
13823         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13824         tnr.process_plates =
13825             (cam_denoise_process_type_t)frame_settings.find(
13826             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13827         b_TnrRequested = tnr.denoise_enable;
13828 
13829         if(b_TnrRequested != curr_tnr_state)
13830            LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13831 
13832         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13833             rc = BAD_VALUE;
13834         }
13835     }
13836 
13837     if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
13838         int32_t* exposure_metering_mode =
13839                 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
13840         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13841                 *exposure_metering_mode)) {
13842             rc = BAD_VALUE;
13843         }
13844     }
13845 
13846     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13847         int32_t fwk_testPatternMode =
13848                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13849         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13850                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13851 
13852         if (NAME_NOT_FOUND != testPatternMode) {
13853             cam_test_pattern_data_t testPatternData;
13854             memset(&testPatternData, 0, sizeof(testPatternData));
13855             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13856             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13857                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13858                 int32_t *fwk_testPatternData =
13859                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13860                 testPatternData.r = fwk_testPatternData[0];
13861                 testPatternData.b = fwk_testPatternData[3];
13862                 switch (gCamCapability[mCameraId]->color_arrangement) {
13863                     case CAM_FILTER_ARRANGEMENT_RGGB:
13864                     case CAM_FILTER_ARRANGEMENT_GRBG:
13865                         testPatternData.gr = fwk_testPatternData[1];
13866                         testPatternData.gb = fwk_testPatternData[2];
13867                         break;
13868                     case CAM_FILTER_ARRANGEMENT_GBRG:
13869                     case CAM_FILTER_ARRANGEMENT_BGGR:
13870                         testPatternData.gr = fwk_testPatternData[2];
13871                         testPatternData.gb = fwk_testPatternData[1];
13872                         break;
13873                     default:
13874                         LOGE("color arrangement %d is not supported",
13875                                 gCamCapability[mCameraId]->color_arrangement);
13876                         break;
13877                 }
13878             }
13879             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13880                     testPatternData)) {
13881                 rc = BAD_VALUE;
13882             }
13883         } else {
13884             LOGE("Invalid framework sensor test pattern mode %d",
13885                     fwk_testPatternMode);
13886         }
13887     }
13888 
13889     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13890         size_t count = 0;
13891         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13892         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13893                 gps_coords.data.d, gps_coords.count, count);
13894         if (gps_coords.count != count) {
13895             rc = BAD_VALUE;
13896         }
13897     }
13898 
13899     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13900         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13901         size_t count = 0;
13902         const char *gps_methods_src = (const char *)
13903                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13904         memset(gps_methods, '\0', sizeof(gps_methods));
13905         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13906         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13907                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13908         if (GPS_PROCESSING_METHOD_SIZE != count) {
13909             rc = BAD_VALUE;
13910         }
13911     }
13912 
13913     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13914         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13915         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13916                 gps_timestamp)) {
13917             rc = BAD_VALUE;
13918         }
13919     }
13920 
13921     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13922         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13923         cam_rotation_info_t rotation_info;
13924         if (orientation == 0) {
13925            rotation_info.rotation = ROTATE_0;
13926         } else if (orientation == 90) {
13927            rotation_info.rotation = ROTATE_90;
13928         } else if (orientation == 180) {
13929            rotation_info.rotation = ROTATE_180;
13930         } else if (orientation == 270) {
13931            rotation_info.rotation = ROTATE_270;
13932         }
13933         rotation_info.device_rotation = ROTATE_0;
13934         rotation_info.streamId = snapshotStreamId;
13935         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13936         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13937             rc = BAD_VALUE;
13938         }
13939     }
13940 
13941     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13942         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13943         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13944             rc = BAD_VALUE;
13945         }
13946     }
13947 
13948     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13949         uint32_t thumb_quality = (uint32_t)
13950                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13951         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13952                 thumb_quality)) {
13953             rc = BAD_VALUE;
13954         }
13955     }
13956 
13957     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13958         cam_dimension_t dim;
13959         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13960         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13961         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13962             rc = BAD_VALUE;
13963         }
13964     }
13965 
13966     // Internal metadata
13967     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13968         size_t count = 0;
13969         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13970         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13971                 privatedata.data.i32, privatedata.count, count);
13972         if (privatedata.count != count) {
13973             rc = BAD_VALUE;
13974         }
13975     }
13976 
13977     // ISO/Exposure Priority
13978     if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13979         frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13980         cam_priority_mode_t mode =
13981                 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13982         if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13983             cam_intf_parm_manual_3a_t use_iso_exp_pty;
13984             use_iso_exp_pty.previewOnly = FALSE;
13985             uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13986             use_iso_exp_pty.value = *ptr;
13987 
13988             if(CAM_ISO_PRIORITY == mode) {
13989                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13990                         use_iso_exp_pty)) {
13991                     rc = BAD_VALUE;
13992                 }
13993             }
13994             else {
13995                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13996                         use_iso_exp_pty)) {
13997                     rc = BAD_VALUE;
13998                 }
13999             }
14000 
14001             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
14002                     rc = BAD_VALUE;
14003             }
14004         }
14005     } else {
14006         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
14007             rc = BAD_VALUE;
14008         }
14009     }
14010 
14011     // Saturation
14012     if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
14013         int32_t* use_saturation =
14014                 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
14015         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
14016             rc = BAD_VALUE;
14017         }
14018     }
14019 
14020     // EV step
14021     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
14022             gCamCapability[mCameraId]->exp_compensation_step)) {
14023         rc = BAD_VALUE;
14024     }
14025 
14026     // CDS info
14027     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
14028         cam_cds_data_t *cdsData = (cam_cds_data_t *)
14029                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
14030 
14031         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14032                 CAM_INTF_META_CDS_DATA, *cdsData)) {
14033             rc = BAD_VALUE;
14034         }
14035     }
14036 
14037     // Hybrid AE
14038     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
14039         uint8_t *hybrid_ae = (uint8_t *)
14040                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
14041         // Motion tracking intent isn't compatible with hybrid ae.
14042         if (mCaptureIntent == CAM_INTENT_MOTION_TRACKING) {
14043             *hybrid_ae = 0;
14044         }
14045         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
14046             rc = BAD_VALUE;
14047         }
14048     }
14049 
14050     // Motion Detection
14051     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
14052         uint8_t *motion_detection = (uint8_t *)
14053                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8;
14054         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MOTION_DETECTION_ENABLE, *motion_detection)) {
14055             rc = BAD_VALUE;
14056         }
14057     }
14058 
14059     // Histogram
14060     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
14061         uint8_t histogramMode =
14062                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
14063         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
14064                 histogramMode)) {
14065             rc = BAD_VALUE;
14066         }
14067     }
14068 
14069     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
14070         int32_t histogramBins =
14071                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
14072         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
14073                 histogramBins)) {
14074             rc = BAD_VALUE;
14075         }
14076     }
14077 
14078     // Tracking AF
14079     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
14080         uint8_t trackingAfTrigger =
14081                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
14082         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
14083                 trackingAfTrigger)) {
14084             rc = BAD_VALUE;
14085         }
14086     }
14087 
14088     // Makernote
14089     camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
14090     if (entry.count != 0) {
14091         if (entry.count <= MAX_MAKERNOTE_LENGTH) {
14092             cam_makernote_t makernote;
14093             makernote.length = entry.count;
14094             memcpy(makernote.data, entry.data.u8, makernote.length);
14095             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
14096                 rc = BAD_VALUE;
14097             }
14098         } else {
14099             ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
14100                     MAX_MAKERNOTE_LENGTH);
14101             rc = BAD_VALUE;
14102         }
14103     }
14104 
14105     return rc;
14106 }
14107 
14108 /*===========================================================================
14109  * FUNCTION   : captureResultCb
14110  *
14111  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
14112  *
14113  * PARAMETERS :
14114  *   @frame  : frame information from mm-camera-interface
14115  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
14116  *   @userdata: userdata
14117  *
14118  * RETURN     : NONE
14119  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)14120 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
14121                 camera3_stream_buffer_t *buffer,
14122                 uint32_t frame_number, bool isInputBuffer, void *userdata)
14123 {
14124     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
14125     if (hw == NULL) {
14126         LOGE("Invalid hw %p", hw);
14127         return;
14128     }
14129 
14130     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
14131     return;
14132 }
14133 
14134 /*===========================================================================
14135  * FUNCTION   : setBufferErrorStatus
14136  *
14137  * DESCRIPTION: Callback handler for channels to report any buffer errors
14138  *
14139  * PARAMETERS :
14140  *   @ch     : Channel on which buffer error is reported from
14141  *   @frame_number  : frame number on which buffer error is reported on
14142  *   @buffer_status : buffer error status
14143  *   @userdata: userdata
14144  *
14145  * RETURN     : NONE
14146  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)14147 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
14148         uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
14149 {
14150     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
14151     if (hw == NULL) {
14152         LOGE("Invalid hw %p", hw);
14153         return;
14154     }
14155 
14156     hw->setBufferErrorStatus(ch, frame_number, err);
14157     return;
14158 }
14159 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)14160 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
14161         uint32_t frameNumber, camera3_buffer_status_t err)
14162 {
14163     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
14164     pthread_mutex_lock(&mMutex);
14165 
14166     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
14167         if (req.frame_number != frameNumber)
14168             continue;
14169         for (auto& k : req.mPendingBufferList) {
14170             if(k.stream->priv == ch) {
14171                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
14172             }
14173         }
14174     }
14175 
14176     pthread_mutex_unlock(&mMutex);
14177     return;
14178 }
14179 /*===========================================================================
14180  * FUNCTION   : initialize
14181  *
14182  * DESCRIPTION: Pass framework callback pointers to HAL
14183  *
14184  * PARAMETERS :
14185  *
14186  *
14187  * RETURN     : Success : 0
14188  *              Failure: -ENODEV
14189  *==========================================================================*/
14190 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)14191 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
14192                                   const camera3_callback_ops_t *callback_ops)
14193 {
14194     LOGD("E");
14195     QCamera3HardwareInterface *hw =
14196         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14197     if (!hw) {
14198         LOGE("NULL camera device");
14199         return -ENODEV;
14200     }
14201 
14202     int rc = hw->initialize(callback_ops);
14203     LOGD("X");
14204     return rc;
14205 }
14206 
14207 /*===========================================================================
14208  * FUNCTION   : configure_streams
14209  *
14210  * DESCRIPTION:
14211  *
14212  * PARAMETERS :
14213  *
14214  *
14215  * RETURN     : Success: 0
14216  *              Failure: -EINVAL (if stream configuration is invalid)
14217  *                       -ENODEV (fatal error)
14218  *==========================================================================*/
14219 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)14220 int QCamera3HardwareInterface::configure_streams(
14221         const struct camera3_device *device,
14222         camera3_stream_configuration_t *stream_list)
14223 {
14224     LOGD("E");
14225     QCamera3HardwareInterface *hw =
14226         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14227     if (!hw) {
14228         LOGE("NULL camera device");
14229         return -ENODEV;
14230     }
14231     int rc = hw->configureStreams(stream_list);
14232     LOGD("X");
14233     return rc;
14234 }
14235 
14236 /*===========================================================================
14237  * FUNCTION   : construct_default_request_settings
14238  *
14239  * DESCRIPTION: Configure a settings buffer to meet the required use case
14240  *
14241  * PARAMETERS :
14242  *
14243  *
14244  * RETURN     : Success: Return valid metadata
14245  *              Failure: Return NULL
14246  *==========================================================================*/
14247 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)14248     construct_default_request_settings(const struct camera3_device *device,
14249                                         int type)
14250 {
14251 
14252     LOGD("E");
14253     camera_metadata_t* fwk_metadata = NULL;
14254     QCamera3HardwareInterface *hw =
14255         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14256     if (!hw) {
14257         LOGE("NULL camera device");
14258         return NULL;
14259     }
14260 
14261     fwk_metadata = hw->translateCapabilityToMetadata(type);
14262 
14263     LOGD("X");
14264     return fwk_metadata;
14265 }
14266 
14267 /*===========================================================================
14268  * FUNCTION   : process_capture_request
14269  *
14270  * DESCRIPTION:
14271  *
14272  * PARAMETERS :
14273  *
14274  *
14275  * RETURN     :
14276  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)14277 int QCamera3HardwareInterface::process_capture_request(
14278                     const struct camera3_device *device,
14279                     camera3_capture_request_t *request)
14280 {
14281     LOGD("E");
14282     CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
14283     QCamera3HardwareInterface *hw =
14284         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14285     if (!hw) {
14286         LOGE("NULL camera device");
14287         return -EINVAL;
14288     }
14289 
14290     int rc = hw->orchestrateRequest(request);
14291     LOGD("X");
14292     return rc;
14293 }
14294 
14295 /*===========================================================================
14296  * FUNCTION   : dump
14297  *
14298  * DESCRIPTION:
14299  *
14300  * PARAMETERS :
14301  *
14302  *
14303  * RETURN     :
14304  *==========================================================================*/
14305 
dump(const struct camera3_device * device,int fd)14306 void QCamera3HardwareInterface::dump(
14307                 const struct camera3_device *device, int fd)
14308 {
14309     /* Log level property is read when "adb shell dumpsys media.camera" is
14310        called so that the log level can be controlled without restarting
14311        the media server */
14312     getLogLevel();
14313 
14314     LOGD("E");
14315     QCamera3HardwareInterface *hw =
14316         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14317     if (!hw) {
14318         LOGE("NULL camera device");
14319         return;
14320     }
14321 
14322     hw->dump(fd);
14323     LOGD("X");
14324     return;
14325 }
14326 
14327 /*===========================================================================
14328  * FUNCTION   : flush
14329  *
14330  * DESCRIPTION:
14331  *
14332  * PARAMETERS :
14333  *
14334  *
14335  * RETURN     :
14336  *==========================================================================*/
14337 
flush(const struct camera3_device * device)14338 int QCamera3HardwareInterface::flush(
14339                 const struct camera3_device *device)
14340 {
14341     int rc;
14342     LOGD("E");
14343     QCamera3HardwareInterface *hw =
14344         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14345     if (!hw) {
14346         LOGE("NULL camera device");
14347         return -EINVAL;
14348     }
14349 
14350     pthread_mutex_lock(&hw->mMutex);
14351     // Validate current state
14352     switch (hw->mState) {
14353         case STARTED:
14354             /* valid state */
14355             break;
14356 
14357         case ERROR:
14358             pthread_mutex_unlock(&hw->mMutex);
14359             hw->handleCameraDeviceError();
14360             return -ENODEV;
14361 
14362         default:
14363             LOGI("Flush returned during state %d", hw->mState);
14364             pthread_mutex_unlock(&hw->mMutex);
14365             return 0;
14366     }
14367     pthread_mutex_unlock(&hw->mMutex);
14368 
14369     rc = hw->flush(true /* restart channels */ );
14370     LOGD("X");
14371     return rc;
14372 }
14373 
14374 /*===========================================================================
14375  * FUNCTION   : close_camera_device
14376  *
14377  * DESCRIPTION:
14378  *
14379  * PARAMETERS :
14380  *
14381  *
14382  * RETURN     :
14383  *==========================================================================*/
close_camera_device(struct hw_device_t * device)14384 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
14385 {
14386     int ret = NO_ERROR;
14387     QCamera3HardwareInterface *hw =
14388         reinterpret_cast<QCamera3HardwareInterface *>(
14389             reinterpret_cast<camera3_device_t *>(device)->priv);
14390     if (!hw) {
14391         LOGE("NULL camera device");
14392         return BAD_VALUE;
14393     }
14394 
14395     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
14396     delete hw;
14397     LOGI("[KPI Perf]: X");
14398     CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
14399     return ret;
14400 }
14401 
14402 /*===========================================================================
14403  * FUNCTION   : getWaveletDenoiseProcessPlate
14404  *
14405  * DESCRIPTION: query wavelet denoise process plate
14406  *
14407  * PARAMETERS : None
14408  *
14409  * RETURN     : WNR prcocess plate value
14410  *==========================================================================*/
getWaveletDenoiseProcessPlate()14411 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
14412 {
14413     char prop[PROPERTY_VALUE_MAX];
14414     memset(prop, 0, sizeof(prop));
14415     property_get("persist.denoise.process.plates", prop, "0");
14416     int processPlate = atoi(prop);
14417     switch(processPlate) {
14418     case 0:
14419         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14420     case 1:
14421         return CAM_WAVELET_DENOISE_CBCR_ONLY;
14422     case 2:
14423         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14424     case 3:
14425         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14426     default:
14427         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14428     }
14429 }
14430 
14431 
14432 /*===========================================================================
14433  * FUNCTION   : getTemporalDenoiseProcessPlate
14434  *
14435  * DESCRIPTION: query temporal denoise process plate
14436  *
14437  * PARAMETERS : None
14438  *
14439  * RETURN     : TNR prcocess plate value
14440  *==========================================================================*/
getTemporalDenoiseProcessPlate()14441 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
14442 {
14443     char prop[PROPERTY_VALUE_MAX];
14444     memset(prop, 0, sizeof(prop));
14445     property_get("persist.tnr.process.plates", prop, "0");
14446     int processPlate = atoi(prop);
14447     switch(processPlate) {
14448     case 0:
14449         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14450     case 1:
14451         return CAM_WAVELET_DENOISE_CBCR_ONLY;
14452     case 2:
14453         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14454     case 3:
14455         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14456     default:
14457         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14458     }
14459 }
14460 
14461 
14462 /*===========================================================================
14463  * FUNCTION   : extractSceneMode
14464  *
14465  * DESCRIPTION: Extract scene mode from frameworks set metadata
14466  *
14467  * PARAMETERS :
14468  *      @frame_settings: CameraMetadata reference
14469  *      @metaMode: ANDROID_CONTORL_MODE
14470  *      @hal_metadata: hal metadata structure
14471  *
14472  * RETURN     : None
14473  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)14474 int32_t QCamera3HardwareInterface::extractSceneMode(
14475         const CameraMetadata &frame_settings, uint8_t metaMode,
14476         metadata_buffer_t *hal_metadata)
14477 {
14478     int32_t rc = NO_ERROR;
14479     uint8_t sceneMode = CAM_SCENE_MODE_OFF;
14480 
14481     if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
14482         LOGD("Ignoring control mode OFF_KEEP_STATE");
14483         return NO_ERROR;
14484     }
14485 
14486     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
14487         camera_metadata_ro_entry entry =
14488                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
14489         if (0 == entry.count)
14490             return rc;
14491 
14492         uint8_t fwk_sceneMode = entry.data.u8[0];
14493 
14494         int val = lookupHalName(SCENE_MODES_MAP,
14495                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
14496                 fwk_sceneMode);
14497         if (NAME_NOT_FOUND != val) {
14498             sceneMode = (uint8_t)val;
14499             LOGD("sceneMode: %d", sceneMode);
14500         }
14501     }
14502 
14503     if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
14504         rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
14505     }
14506 
14507     if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
14508         if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
14509             cam_hdr_param_t hdr_params;
14510             hdr_params.hdr_enable = 1;
14511             hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14512             hdr_params.hdr_need_1x = false;
14513             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14514                     CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
14515                 rc = BAD_VALUE;
14516             }
14517         }
14518 
14519         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14520                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
14521             rc = BAD_VALUE;
14522         }
14523     }
14524 
14525     if (mForceHdrSnapshot) {
14526         cam_hdr_param_t hdr_params;
14527         hdr_params.hdr_enable = 1;
14528         hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14529         hdr_params.hdr_need_1x = false;
14530         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14531                 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
14532             rc = BAD_VALUE;
14533         }
14534     }
14535 
14536     return rc;
14537 }
14538 
14539 /*===========================================================================
14540  * FUNCTION   : setVideoHdrMode
14541  *
14542  * DESCRIPTION: Set Video HDR mode from frameworks set metadata
14543  *
14544  * PARAMETERS :
14545  *      @hal_metadata: hal metadata structure
14546  *      @metaMode: QCAMERA3_VIDEO_HDR_MODE
14547  *
14548  * RETURN     : None
14549  *==========================================================================*/
setVideoHdrMode(metadata_buffer_t * hal_metadata,cam_video_hdr_mode_t vhdr)14550 int32_t QCamera3HardwareInterface::setVideoHdrMode(
14551         metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
14552 {
14553     if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
14554         return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
14555     }
14556 
14557     LOGE("Invalid Video HDR mode %d!", vhdr);
14558     return BAD_VALUE;
14559 }
14560 
14561 /*===========================================================================
14562  * FUNCTION   : setSensorHDR
14563  *
14564  * DESCRIPTION: Enable/disable sensor HDR.
14565  *
14566  * PARAMETERS :
14567  *      @hal_metadata: hal metadata structure
14568  *      @enable: boolean whether to enable/disable sensor HDR
14569  *
14570  * RETURN     : None
14571  *==========================================================================*/
setSensorHDR(metadata_buffer_t * hal_metadata,bool enable,bool isVideoHdrEnable)14572 int32_t QCamera3HardwareInterface::setSensorHDR(
14573         metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
14574 {
14575     int32_t rc = NO_ERROR;
14576     cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
14577 
14578     if (enable) {
14579         char sensor_hdr_prop[PROPERTY_VALUE_MAX];
14580         memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
14581         #ifdef _LE_CAMERA_
14582         //Default to staggered HDR for IOT
14583         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
14584         #else
14585         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
14586         #endif
14587         sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
14588     }
14589 
14590     bool isSupported = false;
14591     switch (sensor_hdr) {
14592         case CAM_SENSOR_HDR_IN_SENSOR:
14593             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14594                     CAM_QCOM_FEATURE_SENSOR_HDR) {
14595                 isSupported = true;
14596                 LOGD("Setting HDR mode In Sensor");
14597             }
14598             break;
14599         case CAM_SENSOR_HDR_ZIGZAG:
14600             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14601                     CAM_QCOM_FEATURE_ZIGZAG_HDR) {
14602                 isSupported = true;
14603                 LOGD("Setting HDR mode Zigzag");
14604             }
14605             break;
14606         case CAM_SENSOR_HDR_STAGGERED:
14607             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14608                     CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
14609                 isSupported = true;
14610                 LOGD("Setting HDR mode Staggered");
14611             }
14612             break;
14613         case CAM_SENSOR_HDR_OFF:
14614             isSupported = true;
14615             LOGD("Turning off sensor HDR");
14616             break;
14617         default:
14618             LOGE("HDR mode %d not supported", sensor_hdr);
14619             rc = BAD_VALUE;
14620             break;
14621     }
14622 
14623     if(isSupported) {
14624         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14625                 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
14626             rc = BAD_VALUE;
14627         } else {
14628             if(!isVideoHdrEnable)
14629                 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
14630         }
14631     }
14632     return rc;
14633 }
14634 
14635 /*===========================================================================
14636  * FUNCTION   : needRotationReprocess
14637  *
14638  * DESCRIPTION: if rotation needs to be done by reprocess in pp
14639  *
14640  * PARAMETERS : none
14641  *
14642  * RETURN     : true: needed
14643  *              false: no need
14644  *==========================================================================*/
needRotationReprocess()14645 bool QCamera3HardwareInterface::needRotationReprocess()
14646 {
14647     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
14648         // current rotation is not zero, and pp has the capability to process rotation
14649         LOGH("need do reprocess for rotation");
14650         return true;
14651     }
14652 
14653     return false;
14654 }
14655 
14656 /*===========================================================================
14657  * FUNCTION   : needReprocess
14658  *
14659  * DESCRIPTION: if reprocess in needed
14660  *
14661  * PARAMETERS : none
14662  *
14663  * RETURN     : true: needed
14664  *              false: no need
14665  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)14666 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
14667 {
14668     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
14669         // TODO: add for ZSL HDR later
14670         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
14671         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
14672             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
14673             return true;
14674         } else {
14675             LOGH("already post processed frame");
14676             return false;
14677         }
14678     }
14679     return needRotationReprocess();
14680 }
14681 
14682 /*===========================================================================
14683  * FUNCTION   : needJpegExifRotation
14684  *
14685  * DESCRIPTION: if rotation from jpeg is needed
14686  *
14687  * PARAMETERS : none
14688  *
14689  * RETURN     : true: needed
14690  *              false: no need
14691  *==========================================================================*/
needJpegExifRotation()14692 bool QCamera3HardwareInterface::needJpegExifRotation()
14693 {
14694     /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
14695     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14696        LOGD("Need use Jpeg EXIF Rotation");
14697        return true;
14698     }
14699     return false;
14700 }
14701 
14702 /*===========================================================================
14703  * FUNCTION   : addOfflineReprocChannel
14704  *
14705  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
14706  *              coming from input channel
14707  *
14708  * PARAMETERS :
14709  *   @config  : reprocess configuration
14710  *   @inputChHandle : pointer to the input (source) channel
14711  *
14712  *
14713  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
14714  *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)14715 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
14716         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
14717 {
14718     int32_t rc = NO_ERROR;
14719     QCamera3ReprocessChannel *pChannel = NULL;
14720 
14721     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
14722             mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
14723             config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
14724     if (NULL == pChannel) {
14725         LOGE("no mem for reprocess channel");
14726         return NULL;
14727     }
14728 
14729     rc = pChannel->initialize(IS_TYPE_NONE);
14730     if (rc != NO_ERROR) {
14731         LOGE("init reprocess channel failed, ret = %d", rc);
14732         delete pChannel;
14733         return NULL;
14734     }
14735 
14736     // pp feature config
14737     cam_pp_feature_config_t pp_config;
14738     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
14739 
14740     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
14741     if (gCamCapability[mCameraId]->qcom_supported_feature_mask
14742             & CAM_QCOM_FEATURE_DSDN) {
14743         //Use CPP CDS incase h/w supports it.
14744         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
14745         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
14746     }
14747     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14748         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
14749     }
14750 
14751     if (config.hdr_param.hdr_enable) {
14752         pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14753         pp_config.hdr_param = config.hdr_param;
14754     }
14755 
14756     if (mForceHdrSnapshot) {
14757         pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14758         pp_config.hdr_param.hdr_enable = 1;
14759         pp_config.hdr_param.hdr_need_1x = 0;
14760         pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14761     }
14762 
14763     rc = pChannel->addReprocStreamsFromSource(pp_config,
14764             config,
14765             IS_TYPE_NONE,
14766             mMetadataChannel);
14767 
14768     if (rc != NO_ERROR) {
14769         delete pChannel;
14770         return NULL;
14771     }
14772     return pChannel;
14773 }
14774 
14775 /*===========================================================================
14776  * FUNCTION   : getMobicatMask
14777  *
14778  * DESCRIPTION: returns mobicat mask
14779  *
14780  * PARAMETERS : none
14781  *
14782  * RETURN     : mobicat mask
14783  *
14784  *==========================================================================*/
getMobicatMask()14785 uint8_t QCamera3HardwareInterface::getMobicatMask()
14786 {
14787     return m_MobicatMask;
14788 }
14789 
14790 /*===========================================================================
14791  * FUNCTION   : setMobicat
14792  *
14793  * DESCRIPTION: set Mobicat on/off.
14794  *
14795  * PARAMETERS :
14796  *   @params  : none
14797  *
14798  * RETURN     : int32_t type of status
14799  *              NO_ERROR  -- success
14800  *              none-zero failure code
14801  *==========================================================================*/
setMobicat()14802 int32_t QCamera3HardwareInterface::setMobicat()
14803 {
14804     int32_t ret = NO_ERROR;
14805 
14806     if (m_MobicatMask) {
14807         tune_cmd_t tune_cmd;
14808         tune_cmd.type = SET_RELOAD_CHROMATIX;
14809         tune_cmd.module = MODULE_ALL;
14810         tune_cmd.value = TRUE;
14811         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14812                 CAM_INTF_PARM_SET_VFE_COMMAND,
14813                 tune_cmd);
14814 
14815         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14816                 CAM_INTF_PARM_SET_PP_COMMAND,
14817                 tune_cmd);
14818     }
14819 
14820     return ret;
14821 }
14822 
14823 /*===========================================================================
14824 * FUNCTION   : getLogLevel
14825 *
14826 * DESCRIPTION: Reads the log level property into a variable
14827 *
14828 * PARAMETERS :
14829 *   None
14830 *
14831 * RETURN     :
14832 *   None
14833 *==========================================================================*/
getLogLevel()14834 void QCamera3HardwareInterface::getLogLevel()
14835 {
14836     char prop[PROPERTY_VALUE_MAX];
14837     uint32_t globalLogLevel = 0;
14838 
14839     property_get("persist.camera.hal.debug", prop, "0");
14840     int val = atoi(prop);
14841     if (0 <= val) {
14842         gCamHal3LogLevel = (uint32_t)val;
14843     }
14844 
14845     property_get("persist.camera.kpi.debug", prop, "0");
14846     gKpiDebugLevel = atoi(prop);
14847 
14848     property_get("persist.camera.global.debug", prop, "0");
14849     val = atoi(prop);
14850     if (0 <= val) {
14851         globalLogLevel = (uint32_t)val;
14852     }
14853 
14854     /* Highest log level among hal.logs and global.logs is selected */
14855     if (gCamHal3LogLevel < globalLogLevel)
14856         gCamHal3LogLevel = globalLogLevel;
14857 
14858     return;
14859 }
14860 
14861 /*===========================================================================
14862  * FUNCTION   : validateStreamRotations
14863  *
14864  * DESCRIPTION: Check if the rotations requested are supported
14865  *
14866  * PARAMETERS :
14867  *   @stream_list : streams to be configured
14868  *
14869  * RETURN     : NO_ERROR on success
14870  *              -EINVAL on failure
14871  *
14872  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)14873 int QCamera3HardwareInterface::validateStreamRotations(
14874         camera3_stream_configuration_t *streamList)
14875 {
14876     int rc = NO_ERROR;
14877 
14878     /*
14879     * Loop through all streams requested in configuration
14880     * Check if unsupported rotations have been requested on any of them
14881     */
14882     for (size_t j = 0; j < streamList->num_streams; j++){
14883         camera3_stream_t *newStream = streamList->streams[j];
14884 
14885         switch(newStream->rotation) {
14886             case CAMERA3_STREAM_ROTATION_0:
14887             case CAMERA3_STREAM_ROTATION_90:
14888             case CAMERA3_STREAM_ROTATION_180:
14889             case CAMERA3_STREAM_ROTATION_270:
14890                 //Expected values
14891                 break;
14892             default:
14893                 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14894                         "type:%d and stream format:%d", __func__,
14895                         newStream->rotation, newStream->stream_type,
14896                         newStream->format);
14897                 return -EINVAL;
14898         }
14899 
14900         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14901         bool isImplDef = (newStream->format ==
14902                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14903         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14904                 isImplDef);
14905 
14906         if (isRotated && (!isImplDef || isZsl)) {
14907             LOGE("Error: Unsupported rotation of %d requested for stream"
14908                     "type:%d and stream format:%d",
14909                     newStream->rotation, newStream->stream_type,
14910                     newStream->format);
14911             rc = -EINVAL;
14912             break;
14913         }
14914     }
14915 
14916     return rc;
14917 }
14918 
14919 /*===========================================================================
14920 * FUNCTION   : getFlashInfo
14921 *
14922 * DESCRIPTION: Retrieve information about whether the device has a flash.
14923 *
14924 * PARAMETERS :
14925 *   @cameraId  : Camera id to query
14926 *   @hasFlash  : Boolean indicating whether there is a flash device
14927 *                associated with given camera
14928 *   @flashNode : If a flash device exists, this will be its device node.
14929 *
14930 * RETURN     :
14931 *   None
14932 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])14933 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14934         bool& hasFlash,
14935         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14936 {
14937     cam_capability_t* camCapability = gCamCapability[cameraId];
14938     if (NULL == camCapability) {
14939         hasFlash = false;
14940         flashNode[0] = '\0';
14941     } else {
14942         hasFlash = camCapability->flash_available;
14943         strlcpy(flashNode,
14944                 (char*)camCapability->flash_dev_name,
14945                 QCAMERA_MAX_FILEPATH_LENGTH);
14946     }
14947 }
14948 
14949 /*===========================================================================
14950 * FUNCTION   : getEepromVersionInfo
14951 *
14952 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
14953 *
14954 * PARAMETERS : None
14955 *
14956 * RETURN     : string describing EEPROM version
14957 *              "\0" if no such info available
14958 *==========================================================================*/
getEepromVersionInfo()14959 const char *QCamera3HardwareInterface::getEepromVersionInfo()
14960 {
14961     return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14962 }
14963 
14964 /*===========================================================================
14965 * FUNCTION   : getLdafCalib
14966 *
14967 * DESCRIPTION: Retrieve Laser AF calibration data
14968 *
14969 * PARAMETERS : None
14970 *
14971 * RETURN     : Two uint32_t describing laser AF calibration data
14972 *              NULL if none is available.
14973 *==========================================================================*/
getLdafCalib()14974 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14975 {
14976     if (mLdafCalibExist) {
14977         return &mLdafCalib[0];
14978     } else {
14979         return NULL;
14980     }
14981 }
14982 
14983 /*===========================================================================
14984 * FUNCTION   : getEaselFwVersion
14985 *
14986 * DESCRIPTION: Retrieve Easel firmware version
14987 *
14988 * PARAMETERS : None
14989 *
14990 * RETURN     : string describing Firmware version
14991 *              "\0" if version is not up to date
14992 *==========================================================================*/
getEaselFwVersion()14993 const char *QCamera3HardwareInterface::getEaselFwVersion()
14994 {
14995     if (mEaselFwUpdated) {
14996         return (const char *)&mEaselFwVersion[0];
14997     } else {
14998         return NULL;
14999     }
15000 }
15001 
15002 /*===========================================================================
15003  * FUNCTION   : dynamicUpdateMetaStreamInfo
15004  *
15005  * DESCRIPTION: This function:
15006  *             (1) stops all the channels
15007  *             (2) returns error on pending requests and buffers
15008  *             (3) sends metastream_info in setparams
15009  *             (4) starts all channels
15010  *             This is useful when sensor has to be restarted to apply any
15011  *             settings such as frame rate from a different sensor mode
15012  *
15013  * PARAMETERS : None
15014  *
15015  * RETURN     : NO_ERROR on success
15016  *              Error codes on failure
15017  *
15018  *==========================================================================*/
dynamicUpdateMetaStreamInfo()15019 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
15020 {
15021     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
15022     int rc = NO_ERROR;
15023 
15024     LOGD("E");
15025 
15026     rc = stopAllChannels();
15027     if (rc < 0) {
15028         LOGE("stopAllChannels failed");
15029         return rc;
15030     }
15031 
15032     rc = notifyErrorForPendingRequests();
15033     if (rc < 0) {
15034         LOGE("notifyErrorForPendingRequests failed");
15035         return rc;
15036     }
15037 
15038     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
15039         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
15040                 "Format:%d",
15041                 mStreamConfigInfo.type[i],
15042                 mStreamConfigInfo.stream_sizes[i].width,
15043                 mStreamConfigInfo.stream_sizes[i].height,
15044                 mStreamConfigInfo.postprocess_mask[i],
15045                 mStreamConfigInfo.format[i]);
15046     }
15047 
15048     /* Send meta stream info once again so that ISP can start */
15049     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
15050             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
15051     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
15052             mParameters);
15053     if (rc < 0) {
15054         LOGE("set Metastreaminfo failed. Sensor mode does not change");
15055     }
15056 
15057     rc = startAllChannels();
15058     if (rc < 0) {
15059         LOGE("startAllChannels failed");
15060         return rc;
15061     }
15062 
15063     LOGD("X");
15064     return rc;
15065 }
15066 
15067 /*===========================================================================
15068  * FUNCTION   : stopAllChannels
15069  *
15070  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
15071  *
15072  * PARAMETERS : None
15073  *
15074  * RETURN     : NO_ERROR on success
15075  *              Error codes on failure
15076  *
15077  *==========================================================================*/
stopAllChannels()15078 int32_t QCamera3HardwareInterface::stopAllChannels()
15079 {
15080     int32_t rc = NO_ERROR;
15081 
15082     LOGD("Stopping all channels");
15083     // Stop the Streams/Channels
15084     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
15085         it != mStreamInfo.end(); it++) {
15086         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
15087         if (channel) {
15088             channel->stop();
15089         }
15090         (*it)->status = INVALID;
15091     }
15092 
15093     if (mSupportChannel) {
15094         mSupportChannel->stop();
15095     }
15096     if (mAnalysisChannel) {
15097         mAnalysisChannel->stop();
15098     }
15099     if (mRawDumpChannel) {
15100         mRawDumpChannel->stop();
15101     }
15102     if (mHdrPlusRawSrcChannel) {
15103         mHdrPlusRawSrcChannel->stop();
15104     }
15105     if (mMetadataChannel) {
15106         /* If content of mStreamInfo is not 0, there is metadata stream */
15107         mMetadataChannel->stop();
15108     }
15109 
15110     LOGD("All channels stopped");
15111     return rc;
15112 }
15113 
15114 /*===========================================================================
15115  * FUNCTION   : startAllChannels
15116  *
15117  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
15118  *
15119  * PARAMETERS : None
15120  *
15121  * RETURN     : NO_ERROR on success
15122  *              Error codes on failure
15123  *
15124  *==========================================================================*/
startAllChannels()15125 int32_t QCamera3HardwareInterface::startAllChannels()
15126 {
15127     int32_t rc = NO_ERROR;
15128 
15129     LOGD("Start all channels ");
15130     // Start the Streams/Channels
15131     if (mMetadataChannel) {
15132         /* If content of mStreamInfo is not 0, there is metadata stream */
15133         rc = mMetadataChannel->start();
15134         if (rc < 0) {
15135             LOGE("META channel start failed");
15136             return rc;
15137         }
15138     }
15139     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
15140         it != mStreamInfo.end(); it++) {
15141         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
15142         if (channel) {
15143             rc = channel->start();
15144             if (rc < 0) {
15145                 LOGE("channel start failed");
15146                 return rc;
15147             }
15148         }
15149     }
15150     if (mAnalysisChannel) {
15151         mAnalysisChannel->start();
15152     }
15153     if (mSupportChannel) {
15154         rc = mSupportChannel->start();
15155         if (rc < 0) {
15156             LOGE("Support channel start failed");
15157             return rc;
15158         }
15159     }
15160     if (mRawDumpChannel) {
15161         rc = mRawDumpChannel->start();
15162         if (rc < 0) {
15163             LOGE("RAW dump channel start failed");
15164             return rc;
15165         }
15166     }
15167     if (mHdrPlusRawSrcChannel) {
15168         rc = mHdrPlusRawSrcChannel->start();
15169         if (rc < 0) {
15170             LOGE("HDR+ RAW channel start failed");
15171             return rc;
15172         }
15173     }
15174 
15175     LOGD("All channels started");
15176     return rc;
15177 }
15178 
15179 /*===========================================================================
15180  * FUNCTION   : notifyErrorForPendingRequests
15181  *
15182  * DESCRIPTION: This function sends error for all the pending requests/buffers
15183  *
15184  * PARAMETERS : None
15185  *
15186  * RETURN     : Error codes
15187  *              NO_ERROR on success
15188  *
15189  *==========================================================================*/
notifyErrorForPendingRequests()15190 int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
15191 {
15192     notifyErrorFoPendingDepthData(mDepthChannel);
15193 
15194     auto pendingRequest = mPendingRequestsList.begin();
15195     auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15196 
15197     // Iterate through pending requests (for which result metadata isn't sent yet) and pending
15198     // buffers (for which buffers aren't sent yet).
15199     while (pendingRequest != mPendingRequestsList.end() ||
15200            pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15201         if (pendingRequest == mPendingRequestsList.end() ||
15202                 ((pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) &&
15203                  (pendingBuffer->frame_number < pendingRequest->frame_number))) {
15204             // If metadata for this frame was sent, notify about a buffer error and returns buffers
15205             // with error.
15206             for (auto &info : pendingBuffer->mPendingBufferList) {
15207                 // Send a buffer error for this frame number.
15208                 camera3_notify_msg_t notify_msg;
15209                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
15210                 notify_msg.type = CAMERA3_MSG_ERROR;
15211                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15212                 notify_msg.message.error.error_stream = info.stream;
15213                 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
15214                 orchestrateNotify(&notify_msg);
15215 
15216                 camera3_stream_buffer_t buffer = {};
15217                 buffer.acquire_fence = -1;
15218                 buffer.release_fence = -1;
15219                 buffer.buffer = info.buffer;
15220                 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15221                 buffer.stream = info.stream;
15222                 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
15223             }
15224 
15225             pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
15226         } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
15227                    ((pendingRequest != mPendingRequestsList.end()) &&
15228                    (pendingBuffer->frame_number > pendingRequest->frame_number ||
15229                     (pendingBuffer->frame_number == pendingRequest->frame_number &&
15230                      pendingBuffer->mPendingBufferList.size() < pendingRequest->num_buffers)))) {
15231             // If some or all buffers for this frame were sent already, notify about a result error,
15232             // as well as remaining buffer errors.
15233             camera3_notify_msg_t notify_msg;
15234             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
15235             notify_msg.type = CAMERA3_MSG_ERROR;
15236             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
15237             notify_msg.message.error.error_stream = nullptr;
15238             notify_msg.message.error.frame_number = pendingRequest->frame_number;
15239             orchestrateNotify(&notify_msg);
15240 
15241             if (pendingRequest->input_buffer != nullptr) {
15242                 camera3_capture_result result = {};
15243                 result.frame_number = pendingRequest->frame_number;
15244                 result.result = nullptr;
15245                 result.input_buffer = pendingRequest->input_buffer;
15246                 orchestrateResult(&result);
15247             }
15248 
15249             if (pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end() &&
15250                     pendingBuffer->frame_number == pendingRequest->frame_number) {
15251                 for (const auto &info : pendingBuffer->mPendingBufferList) {
15252                     // Send a buffer error for this frame number.
15253                     camera3_notify_msg_t notify_msg;
15254                     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
15255                     notify_msg.type = CAMERA3_MSG_ERROR;
15256                     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15257                     notify_msg.message.error.error_stream = info.stream;
15258                     notify_msg.message.error.frame_number = pendingBuffer->frame_number;
15259                     orchestrateNotify(&notify_msg);
15260 
15261                     camera3_stream_buffer_t buffer = {};
15262                     buffer.acquire_fence = -1;
15263                     buffer.release_fence = -1;
15264                     buffer.buffer = info.buffer;
15265                     buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15266                     buffer.stream = info.stream;
15267                     mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
15268                 }
15269                 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
15270             }
15271             mShutterDispatcher.clear(pendingRequest->frame_number);
15272             pendingRequest = mPendingRequestsList.erase(pendingRequest);
15273         } else {
15274             // If both buffers and result metadata weren't sent yet, notify about a request error
15275             // and return buffers with error.
15276             camera3_notify_msg_t notify_msg;
15277             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
15278             notify_msg.type = CAMERA3_MSG_ERROR;
15279             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
15280             notify_msg.message.error.frame_number = pendingBuffer->frame_number;
15281             orchestrateNotify(&notify_msg);
15282 
15283             for (auto &info : pendingBuffer->mPendingBufferList) {
15284                 camera3_stream_buffer_t buffer = {};
15285                 buffer.acquire_fence = -1;
15286                 buffer.release_fence = -1;
15287                 buffer.buffer = info.buffer;
15288                 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15289                 buffer.stream = info.stream;
15290                 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
15291             }
15292 
15293             if (pendingRequest->input_buffer != nullptr) {
15294                 camera3_capture_result result = {};
15295                 result.frame_number = pendingRequest->frame_number;
15296                 result.result = nullptr;
15297                 result.input_buffer = pendingRequest->input_buffer;
15298                 orchestrateResult(&result);
15299             }
15300 
15301             mShutterDispatcher.clear(pendingRequest->frame_number);
15302             pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
15303             pendingRequest = mPendingRequestsList.erase(pendingRequest);
15304         }
15305     }
15306 
15307     /* Reset pending frame Drop list and requests list */
15308     mPendingFrameDropList.clear();
15309     mShutterDispatcher.clear();
15310     mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
15311     mPendingBuffersMap.mPendingBuffersInRequest.clear();
15312     mExpectedFrameDuration = 0;
15313     mExpectedInflightDuration = 0;
15314     LOGH("Cleared all the pending buffers ");
15315 
15316     return NO_ERROR;
15317 }
15318 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)15319 bool QCamera3HardwareInterface::isOnEncoder(
15320         const cam_dimension_t max_viewfinder_size,
15321         uint32_t width, uint32_t height)
15322 {
15323     return ((width > (uint32_t)max_viewfinder_size.width) ||
15324             (height > (uint32_t)max_viewfinder_size.height) ||
15325             (width > (uint32_t)VIDEO_4K_WIDTH) ||
15326             (height > (uint32_t)VIDEO_4K_HEIGHT));
15327 }
15328 
15329 /*===========================================================================
15330  * FUNCTION   : setBundleInfo
15331  *
15332  * DESCRIPTION: Set bundle info for all streams that are bundle.
15333  *
15334  * PARAMETERS : None
15335  *
15336  * RETURN     : NO_ERROR on success
15337  *              Error codes on failure
15338  *==========================================================================*/
setBundleInfo()15339 int32_t QCamera3HardwareInterface::setBundleInfo()
15340 {
15341     int32_t rc = NO_ERROR;
15342 
15343     if (mChannelHandle) {
15344         cam_bundle_config_t bundleInfo;
15345         memset(&bundleInfo, 0, sizeof(bundleInfo));
15346         rc = mCameraHandle->ops->get_bundle_info(
15347                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
15348         if (rc != NO_ERROR) {
15349             LOGE("get_bundle_info failed");
15350             return rc;
15351         }
15352         if (mAnalysisChannel) {
15353             mAnalysisChannel->setBundleInfo(bundleInfo);
15354         }
15355         if (mSupportChannel) {
15356             mSupportChannel->setBundleInfo(bundleInfo);
15357         }
15358         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
15359                 it != mStreamInfo.end(); it++) {
15360             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
15361             channel->setBundleInfo(bundleInfo);
15362         }
15363         if (mRawDumpChannel) {
15364             mRawDumpChannel->setBundleInfo(bundleInfo);
15365         }
15366         if (mHdrPlusRawSrcChannel) {
15367             mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
15368         }
15369     }
15370 
15371     return rc;
15372 }
15373 
15374 /*===========================================================================
15375  * FUNCTION   : setInstantAEC
15376  *
15377  * DESCRIPTION: Set Instant AEC related params.
15378  *
15379  * PARAMETERS :
15380  *      @meta: CameraMetadata reference
15381  *
15382  * RETURN     : NO_ERROR on success
15383  *              Error codes on failure
15384  *==========================================================================*/
setInstantAEC(const CameraMetadata & meta)15385 int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
15386 {
15387     int32_t rc = NO_ERROR;
15388     uint8_t val = 0;
15389     char prop[PROPERTY_VALUE_MAX];
15390 
15391     // First try to configure instant AEC from framework metadata
15392     if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
15393         val = meta.find(QCAMERA3_INSTANT_AEC_MODE).data.u8[0];
15394         LOGE("Instant AEC mode set: %d", val);
15395     }
15396 
15397     // If framework did not set this value, try to read from set prop.
15398     if (val == 0) {
15399         memset(prop, 0, sizeof(prop));
15400         property_get("persist.camera.instant.aec", prop, "0");
15401         val = (uint8_t)atoi(prop);
15402     }
15403 
15404     if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
15405            ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
15406         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
15407         mInstantAEC = val;
15408         mInstantAECSettledFrameNumber = 0;
15409         mInstantAecFrameIdxCount = 0;
15410         LOGH("instantAEC value set %d",val);
15411         if (mInstantAEC) {
15412             memset(prop, 0, sizeof(prop));
15413             property_get("persist.camera.ae.instant.bound", prop, "10");
15414             int32_t aec_frame_skip_cnt = atoi(prop);
15415             if (aec_frame_skip_cnt >= 0) {
15416                 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
15417             } else {
15418                 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
15419                 rc = BAD_VALUE;
15420             }
15421         }
15422     } else {
15423         LOGE("Bad instant aec value set %d", val);
15424         rc = BAD_VALUE;
15425     }
15426     return rc;
15427 }
15428 
15429 /*===========================================================================
15430  * FUNCTION   : get_num_overall_buffers
15431  *
15432  * DESCRIPTION: Estimate number of pending buffers across all requests.
15433  *
15434  * PARAMETERS : None
15435  *
15436  * RETURN     : Number of overall pending buffers
15437  *
15438  *==========================================================================*/
get_num_overall_buffers()15439 uint32_t PendingBuffersMap::get_num_overall_buffers()
15440 {
15441     uint32_t sum_buffers = 0;
15442     for (auto &req : mPendingBuffersInRequest) {
15443         sum_buffers += req.mPendingBufferList.size();
15444     }
15445     return sum_buffers;
15446 }
15447 
15448 /*===========================================================================
15449  * FUNCTION   : removeBuf
15450  *
15451  * DESCRIPTION: Remove a matching buffer from tracker.
15452  *
15453  * PARAMETERS : @buffer: image buffer for the callback
15454  *
15455  * RETURN     : None
15456  *
15457  *==========================================================================*/
removeBuf(buffer_handle_t * buffer)15458 void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
15459 {
15460     bool buffer_found = false;
15461     for (auto req = mPendingBuffersInRequest.begin();
15462             req != mPendingBuffersInRequest.end(); req++) {
15463         for (auto k = req->mPendingBufferList.begin();
15464                 k != req->mPendingBufferList.end(); k++ ) {
15465             if (k->buffer == buffer) {
15466                 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
15467                         req->frame_number, buffer);
15468                 k = req->mPendingBufferList.erase(k);
15469                 if (req->mPendingBufferList.empty()) {
15470                     // Remove this request from Map
15471                     req = mPendingBuffersInRequest.erase(req);
15472                 }
15473                 buffer_found = true;
15474                 break;
15475             }
15476         }
15477         if (buffer_found) {
15478             break;
15479         }
15480     }
15481     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
15482             get_num_overall_buffers());
15483 }
15484 
15485 /*===========================================================================
15486  * FUNCTION   : getBufErrStatus
15487  *
15488  * DESCRIPTION: get buffer error status
15489  *
15490  * PARAMETERS : @buffer: buffer handle
15491  *
15492  * RETURN     : Error status
15493  *
15494  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)15495 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
15496 {
15497     for (auto& req : mPendingBuffersInRequest) {
15498         for (auto& k : req.mPendingBufferList) {
15499             if (k.buffer == buffer)
15500                 return k.bufStatus;
15501         }
15502     }
15503     return CAMERA3_BUFFER_STATUS_OK;
15504 }
15505 
15506 /*===========================================================================
15507  * FUNCTION   : setPAAFSupport
15508  *
15509  * DESCRIPTION: Set the preview-assisted auto focus support bit in
15510  *              feature mask according to stream type and filter
15511  *              arrangement
15512  *
15513  * PARAMETERS : @feature_mask: current feature mask, which may be modified
15514  *              @stream_type: stream type
15515  *              @filter_arrangement: filter arrangement
15516  *
15517  * RETURN     : None
15518  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)15519 void QCamera3HardwareInterface::setPAAFSupport(
15520         cam_feature_mask_t& feature_mask,
15521         cam_stream_type_t stream_type,
15522         cam_color_filter_arrangement_t filter_arrangement)
15523 {
15524     switch (filter_arrangement) {
15525     case CAM_FILTER_ARRANGEMENT_RGGB:
15526     case CAM_FILTER_ARRANGEMENT_GRBG:
15527     case CAM_FILTER_ARRANGEMENT_GBRG:
15528     case CAM_FILTER_ARRANGEMENT_BGGR:
15529         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
15530                 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
15531                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
15532             if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
15533                 feature_mask |= CAM_QCOM_FEATURE_PAAF;
15534         }
15535         break;
15536     case CAM_FILTER_ARRANGEMENT_Y:
15537         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
15538             feature_mask |= CAM_QCOM_FEATURE_PAAF;
15539         }
15540         break;
15541     default:
15542         break;
15543     }
15544     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
15545             feature_mask, stream_type, filter_arrangement);
15546 
15547 
15548 }
15549 
15550 /*===========================================================================
15551 * FUNCTION   : getSensorMountAngle
15552 *
15553 * DESCRIPTION: Retrieve sensor mount angle
15554 *
15555 * PARAMETERS : None
15556 *
15557 * RETURN     : sensor mount angle in uint32_t
15558 *==========================================================================*/
getSensorMountAngle()15559 uint32_t QCamera3HardwareInterface::getSensorMountAngle()
15560 {
15561     return gCamCapability[mCameraId]->sensor_mount_angle;
15562 }
15563 
15564 /*===========================================================================
15565 * FUNCTION   : getRelatedCalibrationData
15566 *
15567 * DESCRIPTION: Retrieve related system calibration data
15568 *
15569 * PARAMETERS : None
15570 *
15571 * RETURN     : Pointer of related system calibration data
15572 *==========================================================================*/
getRelatedCalibrationData()15573 const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
15574 {
15575     return (const cam_related_system_calibration_data_t *)
15576             &(gCamCapability[mCameraId]->related_cam_calibration);
15577 }
15578 
15579 /*===========================================================================
15580  * FUNCTION   : is60HzZone
15581  *
15582  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
15583  *
15584  * PARAMETERS : None
15585  *
15586  * RETURN     : True if in 60Hz zone, False otherwise
15587  *==========================================================================*/
is60HzZone()15588 bool QCamera3HardwareInterface::is60HzZone()
15589 {
15590     time_t t = time(NULL);
15591     struct tm lt;
15592 
15593     struct tm* r = localtime_r(&t, &lt);
15594 
15595     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
15596         return true;
15597     else
15598         return false;
15599 }
15600 
15601 /*===========================================================================
15602  * FUNCTION   : adjustBlackLevelForCFA
15603  *
15604  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
15605  *              of bayer CFA (Color Filter Array).
15606  *
15607  * PARAMETERS : @input: black level pattern in the order of RGGB
15608  *              @output: black level pattern in the order of CFA
15609  *              @color_arrangement: CFA color arrangement
15610  *
15611  * RETURN     : None
15612  *==========================================================================*/
15613 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)15614 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
15615         T input[BLACK_LEVEL_PATTERN_CNT],
15616         T output[BLACK_LEVEL_PATTERN_CNT],
15617         cam_color_filter_arrangement_t color_arrangement)
15618 {
15619     switch (color_arrangement) {
15620     case CAM_FILTER_ARRANGEMENT_GRBG:
15621         output[0] = input[1];
15622         output[1] = input[0];
15623         output[2] = input[3];
15624         output[3] = input[2];
15625         break;
15626     case CAM_FILTER_ARRANGEMENT_GBRG:
15627         output[0] = input[2];
15628         output[1] = input[3];
15629         output[2] = input[0];
15630         output[3] = input[1];
15631         break;
15632     case CAM_FILTER_ARRANGEMENT_BGGR:
15633         output[0] = input[3];
15634         output[1] = input[2];
15635         output[2] = input[1];
15636         output[3] = input[0];
15637         break;
15638     case CAM_FILTER_ARRANGEMENT_RGGB:
15639         output[0] = input[0];
15640         output[1] = input[1];
15641         output[2] = input[2];
15642         output[3] = input[3];
15643         break;
15644     default:
15645         LOGE("Invalid color arrangement to derive dynamic blacklevel");
15646         break;
15647     }
15648 }
15649 
/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Mirror JPEG- and capture-intent-related entries from an HDR+
 *              request's settings buffer into the result metadata. For each
 *              tag: publish the value when present in settings, otherwise
 *              erase any stale entry from the result.
 *
 * PARAMETERS : @resultMetadata: result metadata updated in place
 *              @settings: HDR+ request settings (metadata buffer); must be
 *                         non-null
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
    CameraMetadata &resultMetadata,
    std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        // GPS coordinates are a (lat, lng, alt) triple.
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        // Processing method is carried as a NUL-terminated byte string.
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        // Framework expects quality as uint8_t; backend stores uint32_t.
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_QUALITY);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        // Same uint32_t -> uint8_t narrowing as the main JPEG quality above.
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        // Framework expects thumbnail size as an int32_t [width, height] pair.
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    } else {
        resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
    }
}
15714 
isRequestHdrPlusCompatible(const camera3_capture_request_t & request,const CameraMetadata & metadata)15715 bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
15716         const camera3_capture_request_t &request, const CameraMetadata &metadata) {
15717     if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
15718             metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
15719         ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
15720         return false;
15721     }
15722 
15723     if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
15724          metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
15725             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
15726         ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
15727                 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
15728         return false;
15729     }
15730 
15731     if (!metadata.exists(ANDROID_EDGE_MODE) ||
15732             metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
15733         ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
15734         return false;
15735     }
15736 
15737     if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
15738             metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
15739                     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
15740         ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
15741         return false;
15742     }
15743 
15744     if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
15745             (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
15746              metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
15747                     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
15748         ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
15749         return false;
15750     }
15751 
15752     if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
15753             metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
15754         ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
15755         return false;
15756     }
15757 
15758     if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
15759             metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
15760                     ANDROID_CONTROL_EFFECT_MODE_OFF) {
15761         ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
15762         return false;
15763     }
15764 
15765     if (!metadata.exists(ANDROID_CONTROL_MODE) ||
15766             (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
15767              metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
15768                     ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
15769         ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
15770         return false;
15771     }
15772 
15773     // TODO (b/32585046): support non-ZSL.
15774     if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
15775          metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
15776         ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
15777         return false;
15778     }
15779 
15780     // TODO (b/32586081): support flash.
15781     if (!metadata.exists(ANDROID_FLASH_MODE) ||
15782          metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
15783         ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
15784         return false;
15785     }
15786 
15787     if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
15788          metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
15789         ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
15790         return false;
15791     }
15792 
15793     switch (request.output_buffers[0].stream->format) {
15794         case HAL_PIXEL_FORMAT_BLOB:
15795         case HAL_PIXEL_FORMAT_YCbCr_420_888:
15796         case HAL_PIXEL_FORMAT_Y8:
15797         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15798             break;
15799         default:
15800             ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
15801             for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15802                 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15803                         request.output_buffers[0].stream->width,
15804                         request.output_buffers[0].stream->height,
15805                         request.output_buffers[0].stream->format);
15806             }
15807             return false;
15808     }
15809 
15810     return true;
15811 }
15812 
abortPendingHdrplusRequest(HdrPlusPendingRequest * hdrPlusRequest)15813 void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15814     if (hdrPlusRequest == nullptr) return;
15815 
15816     for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15817         // Find the stream for this buffer.
15818         for (auto streamInfo : mStreamInfo) {
15819             if (streamInfo->id == outputBufferIter.first) {
15820                 if (streamInfo->channel == mPictureChannel) {
15821                     // For picture channel, this buffer is internally allocated so return this
15822                     // buffer to picture channel.
15823                     mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15824                 } else {
15825                     // Unregister this buffer for other channels.
15826                     streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15827                 }
15828                 break;
15829             }
15830         }
15831     }
15832 
15833     hdrPlusRequest->outputBuffers.clear();
15834     hdrPlusRequest->frameworkOutputBuffers.clear();
15835 }
15836 
15837 /*===========================================================================
15838  * FUNCTION   : isEISCropInSnapshotNeeded
15839  *
15840  * DESCRIPTION: In case EIS is active, check whether additional crop is needed
15841  *              to avoid FOV jumps in snapshot streams.
15842  *
15843  * PARAMETERS : @metadata: Current request settings.
15844  *
15845  * RETURN     : True in case EIS crop is needed, False otherwise.
15846  *==========================================================================*/
isEISCropInSnapshotNeeded(const CameraMetadata & metadata) const15847 bool QCamera3HardwareInterface::isEISCropInSnapshotNeeded(const CameraMetadata &metadata) const
15848 {
15849     if (metadata.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
15850         uint8_t vstabMode =
15851             metadata.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
15852         if (vstabMode == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) {
15853             if ((mLastEISCropInfo.delta_x != 0) || (mLastEISCropInfo.delta_y != 0) ||
15854                     (mLastEISCropInfo.delta_width != 0) || (mLastEISCropInfo.delta_height != 0)) {
15855                 return true;
15856             }
15857         }
15858     }
15859 
15860     return false;
15861 }
15862 
15863 /*===========================================================================
15864  * FUNCTION   : isCropValid
15865  *
15866  * DESCRIPTION: Crop sanity checks.
15867  *
15868  * PARAMETERS : @startX: Horizontal crop offset.
15869  *              @startY: Vertical crop offset.
15870  *              @width: Crop width.
15871  *              @height: Crop height.
15872  *              @maxWidth: Horizontal maximum size.
15873  *              @maxHeight: Vertical maximum size.
15874  *
15875  * RETURN     : True in case crop is valid, False otherwise.
15876  *==========================================================================*/
isCropValid(int32_t startX,int32_t startY,int32_t width,int32_t height,int32_t maxWidth,int32_t maxHeight) const15877 bool QCamera3HardwareInterface::isCropValid(int32_t startX, int32_t startY, int32_t width,
15878         int32_t height, int32_t maxWidth, int32_t maxHeight) const
15879 {
15880     if ((startX < 0) || (startY < 0) || (startX >= maxWidth) || (startY >= maxHeight)) {
15881         LOGE("Crop offset is invalid: %dx%d", startX, startY);
15882         return false;
15883     }
15884 
15885     if ((width < 0) || (height < 0) || (width >= maxWidth) || (height >= maxHeight)) {
15886         LOGE("Crop dimensions are invalid: %dx%d", width, height);
15887         return false;
15888     }
15889 
15890     if (((startX + width) > maxWidth)  || ((startY + height) > maxHeight)) {
15891         LOGE("Crop is out of bounds: %dx%d max %dx%d", startX + width, startY + height, maxWidth,
15892                 maxHeight);
15893         return false;
15894     }
15895 
15896     return true;
15897 }
15898 
trySubmittingHdrPlusRequestLocked(HdrPlusPendingRequest * hdrPlusRequest,const camera3_capture_request_t & request,const CameraMetadata & metadata)15899 bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
15900         HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
15901         const CameraMetadata &metadata)
15902 {
15903     if (hdrPlusRequest == nullptr) return false;
15904     if (!isRequestHdrPlusCompatible(request, metadata)) return false;
15905 
15906     status_t res = OK;
15907     pbcamera::CaptureRequest pbRequest;
15908     pbRequest.id = request.frame_number;
15909     // Iterate through all requested output buffers and add them to an HDR+ request.
15910     for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15911         // Find the index of the stream in mStreamInfo.
15912         uint32_t pbStreamId = 0;
15913         bool found = false;
15914         for (auto streamInfo : mStreamInfo) {
15915             if (streamInfo->stream == request.output_buffers[i].stream) {
15916                 pbStreamId = streamInfo->id;
15917                 found = true;
15918                 break;
15919             }
15920         }
15921 
15922         if (!found) {
15923             ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15924             abortPendingHdrplusRequest(hdrPlusRequest);
15925             return false;
15926         }
15927         auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15928         switch (request.output_buffers[i].stream->format) {
15929             case HAL_PIXEL_FORMAT_BLOB:
15930             {
15931                 // For jpeg output, get a YUV buffer from pic channel.
15932                 QCamera3PicChannel *picChannel =
15933                         (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15934                 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15935                 if (res != OK) {
15936                     ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15937                             __FUNCTION__, strerror(-res), res);
15938                     abortPendingHdrplusRequest(hdrPlusRequest);
15939                     return false;
15940                 }
15941                 break;
15942             }
15943             case HAL_PIXEL_FORMAT_YCbCr_420_888:
15944             case HAL_PIXEL_FORMAT_Y8:
15945             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15946             {
15947                 // For YUV output, register the buffer and get the buffer def from the channel.
15948                 QCamera3ProcessingChannel *channel =
15949                         (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15950                 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15951                         outBuffer.get());
15952                 if (res != OK) {
15953                     ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15954                             strerror(-res), res);
15955                     abortPendingHdrplusRequest(hdrPlusRequest);
15956                     return false;
15957                 }
15958                 break;
15959             }
15960             default:
15961                 abortPendingHdrplusRequest(hdrPlusRequest);
15962                 return false;
15963         }
15964 
15965         pbcamera::StreamBuffer buffer;
15966         buffer.streamId = pbStreamId;
15967         buffer.dmaBufFd = outBuffer->fd;
15968         buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15969         buffer.dataSize = outBuffer->frame_len;
15970 
15971         pbRequest.outputBuffers.push_back(buffer);
15972 
15973         hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15974         hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15975     }
15976 
15977     float zoomRatio = 1.0f;
15978     camera_metadata_ro_entry zoomRatioEntry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
15979     if (zoomRatioEntry.count == 1) {
15980         zoomRatio = MIN(MAX(zoomRatioEntry.data.f[0], 1.0f), gCamCapability[mCameraId]->max_zoom);
15981     }
15982 
15983     // Capture requests should not be modified.
15984     CameraMetadata updatedMetadata(metadata);
15985     camera_metadata_entry entry = updatedMetadata.find(ANDROID_SCALER_CROP_REGION);
15986     if (isEISCropInSnapshotNeeded(metadata)) {
15987         int32_t scalerRegion[4] = {0, 0, gCamCapability[mCameraId]->active_array_size.width,
15988             gCamCapability[mCameraId]->active_array_size.height};
15989         if (entry.count == 4) {
15990             auto currentScalerRegion = metadata.find(ANDROID_SCALER_CROP_REGION).data.i32;
15991             scalerRegion[0] = currentScalerRegion[0];
15992             scalerRegion[1] = currentScalerRegion[1];
15993             scalerRegion[2] = currentScalerRegion[2];
15994             scalerRegion[3] = currentScalerRegion[3];
15995 
15996             // Apply zoom ratio to generate new crop region
15997             mCropRegionMapper.applyZoomRatio(scalerRegion[0], scalerRegion[1],
15998                     scalerRegion[2], scalerRegion[3], zoomRatio);
15999 
16000             scalerRegion[0] = currentScalerRegion[0] + mLastEISCropInfo.delta_x;
16001             scalerRegion[1] = currentScalerRegion[1] + mLastEISCropInfo.delta_y;
16002             scalerRegion[2] = currentScalerRegion[2] - mLastEISCropInfo.delta_width;
16003             scalerRegion[3] = currentScalerRegion[3] - mLastEISCropInfo.delta_height;
16004         } else {
16005             scalerRegion[0] += mLastEISCropInfo.delta_x;
16006             scalerRegion[1] += mLastEISCropInfo.delta_y;
16007             scalerRegion[2] -= mLastEISCropInfo.delta_width;
16008             scalerRegion[3] -= mLastEISCropInfo.delta_height;
16009         }
16010 
16011         if (isCropValid(scalerRegion[0], scalerRegion[1], scalerRegion[2], scalerRegion[3],
16012                     gCamCapability[mCameraId]->active_array_size.width,
16013                     gCamCapability[mCameraId]->active_array_size.height)) {
16014             updatedMetadata.update(ANDROID_SCALER_CROP_REGION, scalerRegion, 4);
16015         } else {
16016             LOGE("Invalid EIS compensated crop region");
16017         }
16018     } else {
16019         if (entry.count == 4) {
16020             mCropRegionMapper.applyZoomRatio(entry.data.i32[0], entry.data.i32[1],
16021                     entry.data.i32[2], entry.data.i32[3], zoomRatio);
16022         }
16023     }
16024     res = gHdrPlusClient->submitCaptureRequest(&pbRequest, updatedMetadata);
16025 
16026     if (res != OK) {
16027         ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
16028                 strerror(-res), res);
16029         abortPendingHdrplusRequest(hdrPlusRequest);
16030         return false;
16031     }
16032 
16033     return true;
16034 }
16035 
openHdrPlusClientAsyncLocked()16036 status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
16037 {
16038     if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
16039         return OK;
16040     }
16041 
16042     status_t res = gEaselManagerClient->openHdrPlusClientAsync(mQCamera3HdrPlusListenerThread.get());
16043     if (res != OK) {
16044         ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
16045                 strerror(-res), res);
16046         return res;
16047     }
16048     gHdrPlusClientOpening = true;
16049 
16050     return OK;
16051 }
16052 
enableHdrPlusModeLocked()16053 status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
16054 {
16055     status_t res;
16056 
16057     if (mHdrPlusModeEnabled) {
16058         return OK;
16059     }
16060 
16061     // Check if gHdrPlusClient is opened or being opened.
16062     if (gHdrPlusClient == nullptr) {
16063         if (gHdrPlusClientOpening) {
16064             // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
16065             return OK;
16066         }
16067 
16068         res = openHdrPlusClientAsyncLocked();
16069         if (res != OK) {
16070             ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
16071                     strerror(-res), res);
16072             return res;
16073         }
16074 
16075         // When opening HDR+ client completes, HDR+ mode will be enabled.
16076         return OK;
16077 
16078     }
16079 
16080     // Configure stream for HDR+.
16081     res = configureHdrPlusStreamsLocked();
16082     if (res != OK) {
16083         LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
16084         return res;
16085     }
16086 
16087     // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
16088     res = gHdrPlusClient->setZslHdrPlusMode(true);
16089     if (res != OK) {
16090         LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
16091         return res;
16092     }
16093 
16094     mHdrPlusModeEnabled = true;
16095     ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
16096 
16097     return OK;
16098 }
16099 
finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> & lock)16100 void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
16101 {
16102     if (gHdrPlusClientOpening) {
16103         gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
16104     }
16105 }
16106 
disableHdrPlusModeLocked()16107 void QCamera3HardwareInterface::disableHdrPlusModeLocked()
16108 {
16109     // Disable HDR+ mode.
16110     if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
16111         status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
16112         if (res != OK) {
16113             ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
16114         }
16115 
16116         // Close HDR+ client so Easel can enter low power mode.
16117         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
16118         gHdrPlusClient = nullptr;
16119     }
16120 
16121     mHdrPlusModeEnabled = false;
16122     ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
16123 }
16124 
isSessionHdrPlusModeCompatible()16125 bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
16126 {
16127     // Check that at least one YUV or one JPEG output is configured.
16128     // TODO: Support RAW (b/36690506)
16129     for (auto streamInfo : mStreamInfo) {
16130         if (streamInfo != nullptr && streamInfo->stream != nullptr) {
16131             if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
16132                     (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
16133                      streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
16134                      streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
16135                 return true;
16136             }
16137         }
16138     }
16139 
16140     return false;
16141 }
16142 
configureHdrPlusStreamsLocked()16143 status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
16144 {
16145     pbcamera::InputConfiguration inputConfig;
16146     std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
16147     status_t res = OK;
16148 
16149     // Sensor MIPI will send data to Easel.
16150     inputConfig.isSensorInput = true;
16151     inputConfig.sensorMode.cameraId = mCameraId;
16152     inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
16153     inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
16154     inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
16155     inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
16156     inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
16157     inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
16158     inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
16159 
16160     if (mSensorModeInfo.num_raw_bits != 10) {
16161         ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
16162                 mSensorModeInfo.num_raw_bits);
16163         return BAD_VALUE;
16164     }
16165 
16166     inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
16167 
16168     // Iterate through configured output streams in HAL and configure those streams in HDR+
16169     // service.
16170     for (auto streamInfo : mStreamInfo) {
16171         pbcamera::StreamConfiguration outputConfig;
16172         if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
16173             switch (streamInfo->stream->format) {
16174                 case HAL_PIXEL_FORMAT_BLOB:
16175                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
16176                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
16177                     res = fillPbStreamConfig(&outputConfig, streamInfo->id,
16178                             streamInfo->channel, /*stream index*/0);
16179                     if (res != OK) {
16180                         LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
16181                             __FUNCTION__, strerror(-res), res);
16182 
16183                         return res;
16184                     }
16185 
16186                     outputStreamConfigs.push_back(outputConfig);
16187                     break;
16188                 default:
16189                     // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
16190                     break;
16191             }
16192         }
16193     }
16194 
16195     res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
16196     if (res != OK) {
16197         LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
16198             strerror(-res), res);
16199         return res;
16200     }
16201 
16202     return OK;
16203 }
16204 
handleEaselFatalError()16205 void QCamera3HardwareInterface::handleEaselFatalError()
16206 {
16207     {
16208         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
16209         if (gHdrPlusClient != nullptr) {
16210             gHdrPlusClient->nofityEaselFatalError();
16211         }
16212     }
16213 
16214     pthread_mutex_lock(&mMutex);
16215     mState = ERROR;
16216     pthread_mutex_unlock(&mMutex);
16217 
16218     handleCameraDeviceError(/*stopChannelImmediately*/true);
16219 }
16220 
cleanupEaselErrorFuture()16221 void QCamera3HardwareInterface::cleanupEaselErrorFuture()
16222 {
16223     {
16224         std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
16225         if (!mEaselErrorFuture.valid()) {
16226             // If there is no Easel error, construct a dummy future to wait for.
16227             mEaselErrorFuture = std::async([]() { return; });
16228         }
16229     }
16230 
16231     mEaselErrorFuture.wait();
16232 }
16233 
handleEaselFatalErrorAsync()16234 void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
16235 {
16236     std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
16237 
16238     if (mEaselErrorFuture.valid()) {
16239         // The error future has been invoked.
16240         return;
16241     }
16242 
16243     // Launch a future to handle the fatal error.
16244     mEaselErrorFuture = std::async(std::launch::async,
16245             &QCamera3HardwareInterface::handleEaselFatalError, this);
16246 }
16247 
onEaselFatalError(std::string errMsg)16248 void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
16249 {
16250     ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
16251     handleEaselFatalErrorAsync();
16252 }
16253 
closeHdrPlusClientLocked()16254 void QCamera3HardwareInterface::closeHdrPlusClientLocked()
16255 {
16256     if (gHdrPlusClient != nullptr) {
16257         // Disable HDR+ mode.
16258         disableHdrPlusModeLocked();
16259         // Disconnect Easel if it's connected.
16260         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
16261         gHdrPlusClient = nullptr;
16262         ALOGD("HDR+ client closed.");
16263     }
16264 }
16265 
onThermalThrottle()16266 void QCamera3HardwareInterface::onThermalThrottle() {
16267     ALOGW("%s: Thermal throttling. Will close HDR+ client.", __FUNCTION__);
16268     // HDR+ will be disabled when HAL receives the next request and there is no
16269     // pending HDR+ request.
16270     mEaselThermalThrottled = true;
16271 }
16272 
// HdrPlusClientListener callback: the asynchronous HDR+ client open (started
// by openHdrPlusClientAsyncLocked()) completed successfully. Takes ownership
// of the client, pushes static metadata to it, enables HDR+ mode, and reads
// the Easel firmware version.
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    int rc = NO_ERROR;

    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        // HDR+ was turned off while the open was in flight; drop the client
        // (destroyed when the unique_ptr parameter goes out of scope).
        ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;
    // Wake any thread blocked in finishHdrPlusClientOpeningLocked().
    gHdrPlusClientOpenCond.notify_one();

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
            __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
    }

    // Get Easel firmware version
    if (EaselManagerClientOpened) {
        rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
        if (rc != OK) {
            ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
        } else {
            // NOTE(review): presumably a consumer elsewhere reports the new
            // firmware version when this flag is set -- confirm against the
            // rest of the file.
            mEaselFwUpdated = true;
        }
    }
}
16321 
onOpenFailed(status_t err)16322 void QCamera3HardwareInterface::onOpenFailed(status_t err)
16323 {
16324     ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
16325     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
16326     gHdrPlusClientOpening = false;
16327     gHdrPlusClientOpenCond.notify_one();
16328 }
16329 
onFatalError()16330 void QCamera3HardwareInterface::onFatalError()
16331 {
16332     ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
16333     handleEaselFatalErrorAsync();
16334 }
16335 
onShutter(uint32_t requestId,int64_t apSensorTimestampNs)16336 void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
16337 {
16338     ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
16339             __LINE__, requestId, apSensorTimestampNs);
16340 
16341     mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
16342 }
16343 
onNextCaptureReady(uint32_t requestId)16344 void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
16345 {
16346     pthread_mutex_lock(&mMutex);
16347 
16348     // Find the pending request for this result metadata.
16349     auto requestIter = mPendingRequestsList.begin();
16350     while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
16351         requestIter++;
16352     }
16353 
16354     if (requestIter == mPendingRequestsList.end()) {
16355         ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
16356         pthread_mutex_unlock(&mMutex);
16357         return;
16358     }
16359 
16360     requestIter->partial_result_cnt++;
16361 
16362     CameraMetadata metadata;
16363     uint8_t ready = true;
16364     metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
16365 
16366     // Send it to framework.
16367     camera3_capture_result_t result = {};
16368 
16369     result.result = metadata.getAndLock();
16370     // Populate metadata result
16371     result.frame_number = requestId;
16372     result.num_output_buffers = 0;
16373     result.output_buffers = NULL;
16374     result.partial_result = requestIter->partial_result_cnt;
16375 
16376     orchestrateResult(&result);
16377     metadata.unlock(result.result);
16378 
16379     pthread_mutex_unlock(&mMutex);
16380 }
16381 
// HdrPlusClientListener callback: a postview (early preview-sized) image is
// ready for an HDR+ request. Optionally dumps the image to a .ppm file, then
// delivers the postview configuration and pixel data to the framework as a
// partial result via vendor tags.
//
// requestId: frame number of the originating HDR+ request.
// postview:  pixel data owned by this callback.
// width/height: postview dimensions in pixels.
// stride:    row stride of the image data -- units (bytes vs pixels) not
//            visible here; TODO confirm against HdrPlusClient docs.
// format:    pixel format of the postview image.
void QCamera3HardwareInterface::onPostview(uint32_t requestId,
        std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
        uint32_t stride, int32_t format)
{
    // Debug dump, enabled via the persist.camera.hdrplus.dump_postview prop.
    if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
        ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
                __LINE__, width, height, requestId);
        char buf[FILENAME_MAX] = {};
        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
                requestId, width, height);

        // Build a minimal single-plane stream config describing the postview
        // so the ppm writer knows the layout.
        pbcamera::StreamConfiguration config = {};
        config.image.width = width;
        config.image.height = height;
        config.image.format = format;

        pbcamera::PlaneConfiguration plane = {};
        plane.stride = stride;
        plane.scanline = height;

        config.image.planes.push_back(plane);

        // Wrap the pixel data in a StreamBuffer (no dma-buf; CPU pointer).
        pbcamera::StreamBuffer buffer = {};
        buffer.streamId = 0;
        buffer.dmaBufFd = -1;
        buffer.data = postview->data();
        buffer.dataSize = postview->size();

        hdrplus_client_utils::writePpm(buf, config, buffer);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
        pthread_mutex_unlock(&mMutex);
        return;
    }

    // Each postview delivery counts as one more partial result for this
    // request.
    requestIter->partial_result_cnt++;

    // Publish the postview layout and bytes through vendor tags.
    CameraMetadata metadata;
    int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
            static_cast<int32_t>(stride)};
    metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
    metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());

    // Send it to framework.
    camera3_capture_result_t result = {};

    result.result = metadata.getAndLock();
    // Populate metadata result
    result.frame_number = requestId;
    result.num_output_buffers = 0;
    result.output_buffers = NULL;
    result.partial_result = requestIter->partial_result_cnt;

    orchestrateResult(&result);
    metadata.unlock(result.result);

    pthread_mutex_unlock(&mMutex);
}
16450 
onCaptureResult(pbcamera::CaptureResult * result,const camera_metadata_t & resultMetadata)16451 void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
16452         const camera_metadata_t &resultMetadata)
16453 {
16454     if (result == nullptr) {
16455         ALOGE("%s: result is nullptr.", __FUNCTION__);
16456         return;
16457     }
16458 
16459     // Find the pending HDR+ request.
16460     HdrPlusPendingRequest pendingRequest;
16461     {
16462         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16463         auto req = mHdrPlusPendingRequests.find(result->requestId);
16464         pendingRequest = req->second;
16465     }
16466 
16467     // Update the result metadata with the settings of the HDR+ still capture request because
16468     // the result metadata belongs to a ZSL buffer.
16469     CameraMetadata metadata;
16470     metadata = &resultMetadata;
16471     updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
16472     camera_metadata_t* updatedResultMetadata = metadata.release();
16473 
16474     uint32_t halSnapshotStreamId = 0;
16475     if (mPictureChannel != nullptr) {
16476         halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
16477     }
16478 
16479     auto halMetadata = std::make_shared<metadata_buffer_t>();
16480     clear_metadata_buffer(halMetadata.get());
16481 
16482     // Convert updated result metadata to HAL metadata.
16483     status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
16484             halSnapshotStreamId, /*minFrameDuration*/0);
16485     if (res != 0) {
16486         ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
16487     }
16488 
16489     for (auto &outputBuffer : result->outputBuffers) {
16490         uint32_t streamId = outputBuffer.streamId;
16491 
16492         // Find the framework output buffer in the pending request.
16493         auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
16494         if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
16495             ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
16496                     streamId);
16497             continue;
16498         }
16499 
16500         camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
16501 
16502         // Find the channel for the output buffer.
16503         QCamera3ProcessingChannel *channel =
16504                 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
16505 
16506         // Find the output buffer def.
16507         auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
16508         if (outputBufferIter == pendingRequest.outputBuffers.end()) {
16509             ALOGE("%s: Cannot find output buffer", __FUNCTION__);
16510             continue;
16511         }
16512 
16513         std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
16514 
16515         // Check whether to dump the buffer.
16516         if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
16517                 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
16518             // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
16519             char prop[PROPERTY_VALUE_MAX];
16520             property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
16521             bool dumpYuvOutput = atoi(prop);
16522 
16523             if (dumpYuvOutput) {
16524                 // Dump yuv buffer to a ppm file.
16525                 pbcamera::StreamConfiguration outputConfig;
16526                 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
16527                         channel, /*stream index*/0);
16528                 if (rc == OK) {
16529                     char buf[FILENAME_MAX] = {};
16530                     snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
16531                             result->requestId, streamId,
16532                             outputConfig.image.width, outputConfig.image.height);
16533 
16534                     hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
16535                 } else {
16536                     LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
16537                             "%s (%d).", __FUNCTION__, strerror(-rc), rc);
16538                 }
16539             }
16540         }
16541 
16542         if (channel == mPictureChannel) {
16543             android_errorWriteLog(0x534e4554, "150004253");
16544             // Keep a copy of outputBufferDef until the final JPEG buffer is
16545             // ready because the JPEG callback uses the mm_camera_buf_def_t
16546             // struct. The metaBufDef is stored in a shared_ptr to make sure
16547             // it's freed.
16548             std::shared_ptr<mm_camera_buf_def_t> metaBufDef =
16549                     std::make_shared<mm_camera_buf_def_t>();
16550             {
16551                 pthread_mutex_lock(&mMutex);
16552                 for (auto& pendingBuffers : mPendingBuffersMap.mPendingBuffersInRequest) {
16553                     if (pendingBuffers.frame_number == result->requestId) {
16554                         pendingBuffers.mHdrplusInputBuf = outputBufferDef;
16555                         pendingBuffers.mHdrplusInputMetaBuf = metaBufDef;
16556                         break;
16557                     }
16558                 }
16559                 pthread_mutex_unlock(&mMutex);
16560             }
16561 
16562             // Return the buffer to pic channel for encoding.
16563             mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
16564                     frameworkOutputBuffer->buffer, result->requestId,
16565                     halMetadata, metaBufDef.get());
16566         } else {
16567             // Return the buffer to camera framework.
16568             pthread_mutex_lock(&mMutex);
16569             handleBufferWithLock(frameworkOutputBuffer, result->requestId);
16570             channel->unregisterBuffer(outputBufferDef.get());
16571             pthread_mutex_unlock(&mMutex);
16572         }
16573     }
16574 
16575     // Send HDR+ metadata to framework.
16576     {
16577         pthread_mutex_lock(&mMutex);
16578 
16579         // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
16580         handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
16581         pthread_mutex_unlock(&mMutex);
16582     }
16583 
16584     // Remove the HDR+ pending request.
16585     {
16586         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16587         auto req = mHdrPlusPendingRequests.find(result->requestId);
16588         mHdrPlusPendingRequests.erase(req);
16589     }
16590 }
16591 
// HdrPlusClientListener callback: an HDR+ capture request failed.
// Recovers every buffer attached to the failed request (returning YUV
// buffers to the pic channel, unregistering the rest), then reports a
// CAMERA3_MSG_ERROR_REQUEST plus error-status buffers to the framework and
// drops all pending bookkeeping for that frame number.
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    // Return every output buffer of the failed request to its owner.
    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the channel
        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out request errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            // Mark the buffer as failed so the framework discards it.
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            // Send out request error event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
            mOutputBufferDispatcher.markBufferReady(pendingBuffers->frame_number, streamBuffer);
        }

        // No shutter will arrive for this frame; clear the dispatcher slot.
        mShutterDispatcher.clear(pendingBuffers->frame_number);



        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}
16709 
/**
 * Read the sensor calibration XML at /persist/sensors/calibration/
 * calibration.xml and translate it into the arrays used for the camera
 * lens-calibration static metadata.
 *
 * The file is expected to contain (per the parsing below):
 *   rig -> camera -> camera_model (index attribute 0, type
 *   "calibu_fu_fv_u0_v0_k1_k2_k3") with width/height/params children, and
 *   rig -> extrinsic_calibration (frame_B_id attribute 0) with A_q_B / A_p_B.
 *
 * @param activeArrayWidth  Active-array width used to rescale intrinsics from
 *                          the calibration model width.
 * @param poseRotation      Out: 4-element quaternion from "A_q_B".
 * @param poseTranslation   Out: 3-element translation from "A_p_B".
 * @param cameraIntrinsics  Out: [f_x, f_y, c_x, c_y, s]; skew s is always 0.
 * @param radialDistortion  Out: [k_1, k_2, k_3, 0, 0].
 * @return true on success; false (after logging an error) on any failure.
 */
bool QCamera3HardwareInterface::readSensorCalibration(
        int activeArrayWidth,
        float poseRotation[4], float poseTranslation[3],
        float cameraIntrinsics[5], float radialDistortion[5]) {

    const char* calibrationPath = "/persist/sensors/calibration/calibration.xml";

    using namespace tinyxml2;

    XMLDocument calibrationXml;
    XMLError err = calibrationXml.LoadFile(calibrationPath);
    if (err != XML_SUCCESS) {
        ALOGE("Unable to load calibration file '%s'. Error: %s",
                calibrationPath, XMLDocument::ErrorIDToName(err));
        return false;
    }
    XMLElement *rig = calibrationXml.FirstChildElement("rig");
    if (rig == nullptr) {
        ALOGE("No 'rig' in calibration file");
        return false;
    }
    // Walk the <camera> siblings looking for the camera model with index 0.
    XMLElement *cam = rig->FirstChildElement("camera");
    XMLElement *camModel = nullptr;
    while (cam != nullptr) {
        camModel = cam->FirstChildElement("camera_model");
        if (camModel == nullptr) {
            ALOGE("No 'camera_model' in calibration file");
            return false;
        }
        int modelIndex = camModel->IntAttribute("index", -1);
        // Model index "0" has the calibration we need
        if (modelIndex == 0) {
            break;
        }
        cam = cam->NextSiblingElement("camera");
    }
    if (cam == nullptr) {
        ALOGE("No 'camera' in calibration file");
        return false;
    }
    // Only this specific parameterization (fu, fv, u0, v0, k1, k2, k3) is
    // supported; any other model type is rejected.
    const char *modelType = camModel->Attribute("type");
    if (modelType == nullptr || strcmp(modelType,"calibu_fu_fv_u0_v0_k1_k2_k3")) {
        ALOGE("Camera model is unknown type %s",
                modelType ? modelType : "NULL");
        return false;
    }
    // Model width/height define the resolution the calibration was done at.
    XMLElement *modelWidth = camModel->FirstChildElement("width");
    if (modelWidth == nullptr || modelWidth->GetText() == nullptr) {
        ALOGE("No camera model width in calibration file");
        return false;
    }
    int width = atoi(modelWidth->GetText());
    XMLElement *modelHeight = camModel->FirstChildElement("height");
    if (modelHeight == nullptr || modelHeight->GetText() == nullptr) {
        ALOGE("No camera model height in calibration file");
        return false;
    }
    int height = atoi(modelHeight->GetText());
    // atoi() returns 0 for unparseable text, which this range check catches.
    if (width <= 0 || height <= 0) {
        ALOGE("Bad model width or height in calibration file: %d x %d", width, height);
        return false;
    }
    ALOGI("Width: %d, Height: %d", width, height);

    XMLElement *modelParams = camModel->FirstChildElement("params");
    if (modelParams == nullptr) {
        ALOGE("No camera model params in calibration file");
        return false;
    }
    const char* paramText = modelParams->GetText();
    if (paramText == nullptr) {
        ALOGE("No parameters in params element in calibration file");
        return false;
    }
    ALOGI("Parameters: %s", paramText);

    // Parameter string is of the form "[ float; float; float ...]"
    // with exactly 7 values: fu, fv, u0, v0, k1, k2, k3.
    float params[7];
    bool success = parseStringArray(paramText, params, 7);
    if (!success) {
        ALOGE("Malformed camera parameter string in calibration file");
        return false;
    }

    // Find the extrinsic calibration entry with frame_B_id 0.
    XMLElement *extCalib = rig->FirstChildElement("extrinsic_calibration");
    while (extCalib != nullptr) {
        int id = extCalib->IntAttribute("frame_B_id", -1);
        if (id == 0) {
            break;
        }
        extCalib = extCalib->NextSiblingElement("extrinsic_calibration");
    }
    if (extCalib == nullptr) {
        ALOGE("No 'extrinsic_calibration' in calibration file");
        return false;
    }

    // "A_q_B": rotation quaternion (4 values).
    XMLElement *q = extCalib->FirstChildElement("A_q_B");
    if (q == nullptr || q->GetText() == nullptr) {
        ALOGE("No extrinsic quarternion in calibration file");
        return false;
    }
    float rotation[4];
    success = parseStringArray(q->GetText(), rotation, 4);
    if (!success) {
        ALOGE("Malformed extrinsic quarternion string in calibration file");
        return false;
    }

    // "A_p_B": translation vector (3 values).
    XMLElement *p = extCalib->FirstChildElement("A_p_B");
    if (p == nullptr || p->GetText() == nullptr) {
        ALOGE("No extrinsic translation in calibration file");
        return false;
    }
    float position[3];
    success = parseStringArray(p->GetText(), position, 3);
    if (!success) {
        ALOGE("Malformed extrinsic position string in calibration file");
        return false;
    }

    // Map from width x height to active array
    float scaleFactor = static_cast<float>(activeArrayWidth) / width;

    cameraIntrinsics[0] = params[0] * scaleFactor; // fu -> f_x
    cameraIntrinsics[1] = params[1] * scaleFactor; // fv -> f_y
    cameraIntrinsics[2] = params[2] * scaleFactor; // u0 -> c_x
    cameraIntrinsics[3] = params[3] * scaleFactor; // v0 -> c_y
    cameraIntrinsics[4] = 0; // s = 0

    radialDistortion[0] = params[4]; // k1 -> k_1
    radialDistortion[1] = params[5]; // k2 -> k_2
    radialDistortion[2] = params[6]; // k3 -> k_3
    radialDistortion[3] = 0; // k_4 = 0
    radialDistortion[4] = 0; // k_5 = 0

    // Rotation and translation are passed through unscaled.
    for (int i = 0; i < 4; i++) {
        poseRotation[i] = rotation[i];
    }
    for (int i = 0; i < 3; i++) {
        poseTranslation[i] = position[i];
    }

    ALOGI("Intrinsics: %f, %f, %f, %f, %f", cameraIntrinsics[0],
            cameraIntrinsics[1], cameraIntrinsics[2],
            cameraIntrinsics[3], cameraIntrinsics[4]);
    ALOGI("Distortion: %f, %f, %f, %f, %f",
            radialDistortion[0], radialDistortion[1], radialDistortion[2], radialDistortion[3],
            radialDistortion[4]);
    ALOGI("Pose rotation: %f, %f, %f, %f",
            poseRotation[0], poseRotation[1], poseRotation[2], poseRotation[3]);
    ALOGI("Pose translation: %f, %f, %f",
            poseTranslation[0], poseTranslation[1], poseTranslation[2]);

    return true;
}
16866 
parseStringArray(const char * str,float * dest,int count)16867 bool QCamera3HardwareInterface::parseStringArray(const char *str, float *dest, int count) {
16868     size_t idx = 0;
16869     size_t len = strlen(str);
16870     for (; idx < len; idx++) {
16871         if (str[idx] == '[') break;
16872     }
16873     const char *startParam = str + idx + 1;
16874     if (startParam >= str + len) {
16875         ALOGE("Malformed array: %s", str);
16876         return false;
16877     }
16878     char *endParam = nullptr;
16879     for (int i = 0; i < count; i++) {
16880         dest[i] = strtod(startParam, &endParam);
16881         if (startParam == endParam) {
16882             ALOGE("Malformed array, index %d: %s", i, str);
16883             return false;
16884         }
16885         startParam = endParam + 1;
16886         if (startParam >= str + len) {
16887             ALOGE("Malformed array, index %d: %s", i, str);
16888             return false;
16889         }
16890     }
16891     return true;
16892 }
16893 
ShutterDispatcher(QCamera3HardwareInterface * parent)16894 ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
16895         mParent(parent) {}
16896 
expectShutter(uint32_t frameNumber,bool isReprocess,bool isZsl)16897 void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess, bool isZsl)
16898 {
16899     std::lock_guard<std::mutex> lock(mLock);
16900 
16901     if (isReprocess) {
16902         mReprocessShutters.emplace(frameNumber, Shutter());
16903     } else if (isZsl) {
16904         mZslShutters.emplace(frameNumber, Shutter());
16905     } else {
16906         mShutters.emplace(frameNumber, Shutter());
16907     }
16908 }
16909 
markShutterReady(uint32_t frameNumber,uint64_t timestamp)16910 void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
16911 {
16912     std::lock_guard<std::mutex> lock(mLock);
16913 
16914     std::map<uint32_t, Shutter> *shutters = nullptr;
16915 
16916     // Find the shutter entry.
16917     auto shutter = mShutters.find(frameNumber);
16918     if (shutter != mShutters.end()) {
16919         shutters = &mShutters;
16920     } else {
16921         shutter = mReprocessShutters.find(frameNumber);
16922         if (shutter != mReprocessShutters.end()) {
16923             shutters = &mReprocessShutters;
16924         } else {
16925             shutter = mZslShutters.find(frameNumber);
16926             if (shutter != mZslShutters.end()) {
16927                 shutters = &mZslShutters;
16928             } else {
16929                 // Shutter was already sent.
16930                 return;
16931             }
16932         }
16933     }
16934 
16935     if (shutter->second.ready) {
16936         // If shutter is already ready, don't update timestamp again.
16937         return;
16938     }
16939 
16940     // Make this frame's shutter ready.
16941     shutter->second.ready = true;
16942     shutter->second.timestamp = timestamp;
16943 
16944     // Iterate throught the shutters and send out shuters until the one that's not ready yet.
16945     shutter = shutters->begin();
16946     while (shutter != shutters->end()) {
16947         if (!shutter->second.ready) {
16948             // If this shutter is not ready, the following shutters can't be sent.
16949             break;
16950         }
16951 
16952         camera3_notify_msg_t msg = {};
16953         msg.type = CAMERA3_MSG_SHUTTER;
16954         msg.message.shutter.frame_number = shutter->first;
16955         msg.message.shutter.timestamp = shutter->second.timestamp;
16956         mParent->orchestrateNotify(&msg);
16957 
16958         shutter = shutters->erase(shutter);
16959     }
16960 }
16961 
clear(uint32_t frameNumber)16962 void ShutterDispatcher::clear(uint32_t frameNumber)
16963 {
16964     std::lock_guard<std::mutex> lock(mLock);
16965     mShutters.erase(frameNumber);
16966     mReprocessShutters.erase(frameNumber);
16967     mZslShutters.erase(frameNumber);
16968 }
16969 
clear()16970 void ShutterDispatcher::clear()
16971 {
16972     std::lock_guard<std::mutex> lock(mLock);
16973 
16974     // Log errors for stale shutters.
16975     for (auto &shutter : mShutters) {
16976         ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
16977             __FUNCTION__, shutter.first, shutter.second.ready,
16978             shutter.second.timestamp);
16979     }
16980 
16981     // Log errors for stale reprocess shutters.
16982     for (auto &shutter : mReprocessShutters) {
16983         ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
16984             __FUNCTION__, shutter.first, shutter.second.ready,
16985             shutter.second.timestamp);
16986     }
16987 
16988     // Log errors for stale ZSL shutters.
16989     for (auto &shutter : mZslShutters) {
16990         ALOGE("%s: stale zsl shutter: frame number %u, ready %d, timestamp %" PRId64,
16991             __FUNCTION__, shutter.first, shutter.second.ready,
16992             shutter.second.timestamp);
16993     }
16994 
16995     mShutters.clear();
16996     mReprocessShutters.clear();
16997     mZslShutters.clear();
16998 }
16999 
OutputBufferDispatcher(QCamera3HardwareInterface * parent)17000 OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
17001         mParent(parent) {}
17002 
configureStreams(camera3_stream_configuration_t * streamList)17003 status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
17004 {
17005     std::lock_guard<std::mutex> lock(mLock);
17006     mStreamBuffers.clear();
17007     if (!streamList) {
17008         ALOGE("%s: streamList is nullptr.", __FUNCTION__);
17009         return -EINVAL;
17010     }
17011 
17012     // Create a "frame-number -> buffer" map for each stream.
17013     for (uint32_t i = 0; i < streamList->num_streams; i++) {
17014         mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
17015     }
17016 
17017     return OK;
17018 }
17019 
expectBuffer(uint32_t frameNumber,camera3_stream_t * stream)17020 status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
17021 {
17022     std::lock_guard<std::mutex> lock(mLock);
17023 
17024     // Find the "frame-number -> buffer" map for the stream.
17025     auto buffers = mStreamBuffers.find(stream);
17026     if (buffers == mStreamBuffers.end()) {
17027         ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
17028         return -EINVAL;
17029     }
17030 
17031     // Create an unready buffer for this frame number.
17032     buffers->second.emplace(frameNumber, Buffer());
17033     return OK;
17034 }
17035 
markBufferReady(uint32_t frameNumber,const camera3_stream_buffer_t & buffer)17036 void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
17037         const camera3_stream_buffer_t &buffer)
17038 {
17039     std::lock_guard<std::mutex> lock(mLock);
17040 
17041     // Find the frame number -> buffer map for the stream.
17042     auto buffers = mStreamBuffers.find(buffer.stream);
17043     if (buffers == mStreamBuffers.end()) {
17044         ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
17045         return;
17046     }
17047 
17048     // Find the unready buffer this frame number and mark it ready.
17049     auto pendingBuffer = buffers->second.find(frameNumber);
17050     if (pendingBuffer == buffers->second.end()) {
17051         ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
17052         return;
17053     }
17054 
17055     pendingBuffer->second.ready = true;
17056     pendingBuffer->second.buffer = buffer;
17057 
17058     // Iterate through the buffers and send out buffers until the one that's not ready yet.
17059     pendingBuffer = buffers->second.begin();
17060     while (pendingBuffer != buffers->second.end()) {
17061         if (!pendingBuffer->second.ready) {
17062             // If this buffer is not ready, the following buffers can't be sent.
17063             break;
17064         }
17065 
17066         camera3_capture_result_t result = {};
17067         result.frame_number = pendingBuffer->first;
17068         result.num_output_buffers = 1;
17069         result.output_buffers = &pendingBuffer->second.buffer;
17070 
17071         // Send out result with buffer errors.
17072         mParent->orchestrateResult(&result);
17073 
17074         pendingBuffer = buffers->second.erase(pendingBuffer);
17075     }
17076 }
17077 
clear(bool clearConfiguredStreams)17078 void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
17079 {
17080     std::lock_guard<std::mutex> lock(mLock);
17081 
17082     // Log errors for stale buffers.
17083     for (auto &buffers : mStreamBuffers) {
17084         for (auto &buffer : buffers.second) {
17085             ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
17086                 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
17087         }
17088         buffers.second.clear();
17089     }
17090 
17091     if (clearConfiguredStreams) {
17092         mStreamBuffers.clear();
17093     }
17094 }
17095 
17096 }; //end namespace qcamera
17097