• Home
  • History
  • Annotate
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1  /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2  *
3  * Redistribution and use in source and binary forms, with or without
4  * modification, are permitted provided that the following conditions are
5  * met:
6  *     * Redistributions of source code must retain the above copyright
7  *       notice, this list of conditions and the following disclaimer.
8  *     * Redistributions in binary form must reproduce the above
9  *       copyright notice, this list of conditions and the following
10  *       disclaimer in the documentation and/or other materials provided
11  *       with the distribution.
12  *     * Neither the name of The Linux Foundation nor the names of its
13  *       contributors may be used to endorse or promote products derived
14  *       from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19  * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25  * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26  * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  *
28  */
29  
30  #define LOG_TAG "QCamera3HWI"
31  //#define LOG_NDEBUG 0
32  
33  #define __STDC_LIMIT_MACROS
34  
35  // To remove
36  #include <cutils/properties.h>
37  
38  // System dependencies
39  #include <dlfcn.h>
40  #include <fcntl.h>
41  #include <stdio.h>
42  #include <stdlib.h>
43  #include "utils/Timers.h"
44  #include "sys/ioctl.h"
45  #include <time.h>
46  #include <sync/sync.h>
47  #include "gralloc_priv.h"
48  #include <map>
49  
50  // Display dependencies
51  #include "qdMetaData.h"
52  
53  // Camera dependencies
54  #include "android/QCamera3External.h"
55  #include "util/QCameraFlash.h"
56  #include "QCamera3HWI.h"
57  #include "QCamera3VendorTags.h"
58  #include "QCameraTrace.h"
59  
60  // XML parsing
61  #include "tinyxml2.h"
62  
63  #include "HdrPlusClientUtils.h"
64  
65  extern "C" {
66  #include "mm_camera_dbg.h"
67  }
68  #include "cam_cond.h"
69  
70  using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
71  using namespace android;
72  
73  namespace qcamera {
74  
// Fetch the buffer pointer at INDEX from a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY     0

// Maximum values representable in 8/10/12 bits (e.g. pixel sample limits).
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD 4K video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for EIS (electronic image stabilization).
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 elements that describe one region tuple.
#define REGIONS_TUPLE_COUNT    5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
// Flush timeout, same units (seconds) as the missing-buffer timeouts above.
#define FLUSH_TIMEOUT 3
// Number of entries in a statically-sized map table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing feature flags used by the HAL3 pipeline.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel timeout value meaning "wait indefinitely".
#define TIMEOUT_NEVER -1

/* Face rect indices (into a 5-element face rectangle array) */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices (into a TOTAL_LANDMARK_INDICES-element array) */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
// Per-sensor capability and serialized static metadata tables, indexed by
// camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (see getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
int32_t gActiveEaselClient = 0; // The number of active cameras on Easel.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
165  
// CDS setting name string -> HAL CDS mode.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Vendor-tag video HDR mode <-> HAL video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor-tag binning correction mode <-> HAL binning correction mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor-tag IR (infrared) mode <-> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
192  
// Android color effect mode <-> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android auto-white-balance mode <-> HAL white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene mode <-> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// Android autofocus mode <-> HAL focus mode. ANDROID_CONTROL_AF_MODE_OFF
// appears twice on purpose: both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED
// translate back to AF_MODE_OFF, and the first match wins on reverse lookup,
// so do not reorder these entries.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
254  
// Android color-correction aberration mode <-> HAL aberration mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding mode <-> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode -> HAL flash mode. Note AE_MODE_OFF, AE_MODE_ON and
// ON_EXTERNAL_FLASH all map to flash OFF (no HAL-driven flash in those modes).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

// Android flash mode <-> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect mode <-> HAL face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance calibration <-> HAL focus calibration.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens state <-> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
319  
// Supported JPEG thumbnail sizes as flattened (width, height) pairs. Per the
// Android ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES contract, the (0, 0) entry
// means thumbnail generation can be disabled.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Android sensor test-pattern mode <-> HAL test-pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
339  
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 * Do NOT reorder entries without re-checking the HAL->Android reverse lookup.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
365  
// High-frame-rate fps value -> HAL HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor-tag instant AEC mode <-> HAL AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// Vendor-tag exposure metering mode <-> HAL auto-exposure mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor-tag ISO mode <-> HAL ISO mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};
410  
// camera3_device_ops vtable handed to the framework. register_stream_buffers
// and get_metadata_vendor_tag_ops are deliberately NULL (deprecated in HAL3.2+).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// (0xDEADBEEF marks slots whose session id has not been assigned yet)
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
425  
logEaselEvent(const char * tag,const char * event)426  static inline void logEaselEvent(const char *tag, const char *event) {
427      if (CC_UNLIKELY(gEaselProfilingEnabled)) {
428          struct timespec ts = {};
429          static int64_t kMsPerSec = 1000;
430          static int64_t kNsPerMs = 1000000;
431          status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
432          if (res != OK) {
433              ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
434          } else {
435              int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
436              ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
437          }
438      }
439  }
440  
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to idle defaults, fills in the camera3_device
 *              function table, and reads the persist.camera.* system
 *              properties that control debug/feature behavior.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks provided by the framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_ISTypeVideo(IS_TYPE_NONE),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
      mLastRequestedOisDataMode(ANDROID_STATISTICS_OIS_DATA_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1),
      mLastFocusDistance(0.0)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t handed back to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    // TNR (temporal noise reduction) toggles: preview off / SW-TNR preview on /
    // video on by default.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    // Optionally query the GPU's preferred stride alignment; fall back to the
    // CAM_PAD_TO_64 default if the library or symbol is unavailable.
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    // PD (phase-detect) stats stream index; a negative index means unsupported.
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}
626  
627  /*===========================================================================
628   * FUNCTION   : ~QCamera3HardwareInterface
629   *
630   * DESCRIPTION: destructor of QCamera3HardwareInterface
631   *
632   * PARAMETERS : none
633   *
634   * RETURN     : none
635   *==========================================================================*/
/*===========================================================================
 * Destructor: tears the session down in strict order — Easel/HDR+ first,
 * dual-cam unlink, stop all streams, delete channels, unconfigure backend,
 * close camera, then free pending-request bookkeeping and sync primitives.
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Clean up Easel error future first to avoid Easel error happens during destructor.
    cleanupEaselErrorFuture();

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        // Wait for any in-flight HDR+ client open to finish before closing.
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock protects the global sessionId[] table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: an unlink failure must not block camera close.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    // Metadata channel is stopped last among the per-stream channels.
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        // Stop the bundled backend channel; non-immediate stop drains buffers.
        stopChannelLocked(/*stop_immediately*/false);
    }

    // Second pass: now that everything is stopped, delete channel objects and
    // free the stream_info_t entries that own them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // These point at channels owned by mStreamInfo (deleted above) — only
    // clear the aliases here, do not delete through them.
    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    // Close the backend session unless close_camera() already ran.
    if (mState != CLOSED)
        closeCamera();

    // Drop all pending-buffer and pending-request bookkeeping.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    // Free cached default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
812  
813  /*===========================================================================
814   * FUNCTION   : erasePendingRequest
815   *
816   * DESCRIPTION: function to erase a desired pending request after freeing any
817   *              allocated memory
818   *
819   * PARAMETERS :
820   *   @i       : iterator pointing to pending request to be erased
821   *
822   * RETURN     : iterator pointing to the next request
823   *==========================================================================*/
824  QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)825          QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
826  {
827      if (i->input_buffer != NULL) {
828          free(i->input_buffer);
829          i->input_buffer = NULL;
830      }
831      if (i->settings != NULL)
832          free_camera_metadata((camera_metadata_t*)i->settings);
833  
834      mExpectedInflightDuration -= i->expectedFrameDuration;
835      if (mExpectedInflightDuration < 0) {
836          LOGE("Negative expected in-flight duration!");
837          mExpectedInflightDuration = 0;
838      }
839  
840      return mPendingRequestsList.erase(i);
841  }
842  
843  /*===========================================================================
844   * FUNCTION   : camEvtHandle
845   *
846   * DESCRIPTION: Function registered to mm-camera-interface to handle events
847   *
848   * PARAMETERS :
849   *   @camera_handle : interface layer camera handle
850   *   @evt           : ptr to event
851   *   @user_data     : user data ptr
852   *
853   * RETURN     : none
854   *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)855  void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
856                                            mm_camera_event_t *evt,
857                                            void *user_data)
858  {
859      QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
860      if (obj && evt) {
861          switch(evt->server_event_type) {
862              case CAM_EVENT_TYPE_DAEMON_DIED:
863                  pthread_mutex_lock(&obj->mMutex);
864                  obj->mState = ERROR;
865                  pthread_mutex_unlock(&obj->mMutex);
866                  LOGE("Fatal, camera daemon died");
867                  break;
868  
869              case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
870                  LOGD("HAL got request pull from Daemon");
871                  pthread_mutex_lock(&obj->mMutex);
872                  obj->mWokenUpByDaemon = true;
873                  obj->unblockRequestIfNecessary();
874                  pthread_mutex_unlock(&obj->mMutex);
875                  break;
876  
877              default:
878                  LOGW("Warning: Unhandled event %d",
879                          evt->server_event_type);
880                  break;
881          }
882      } else {
883          LOGE("NULL user_data/evt");
884      }
885  }
886  
887  /*===========================================================================
888   * FUNCTION   : openCamera
889   *
890   * DESCRIPTION: open camera
891   *
892   * PARAMETERS :
893   *   @hw_device  : double ptr for camera device struct
894   *
895   * RETURN     : int32_t type of status
896   *              NO_ERROR  -- success
897   *              none-zero failure code
898   *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    // Only a CLOSED camera may be opened.
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
             mCameraId);

    if (mCameraHandle) {
        // NOTE(review): this early return leaves PERF_LOCK_OPEN_CAMERA
        // acquired — presumably released elsewhere; confirm.
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            // First client resumes Easel; subsequent opens just ref-count.
            if (gActiveEaselClient == 0) {
                rc = gEaselManagerClient->resume(this);
                if (rc != 0) {
                    ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                    return rc;
                }
                mEaselFwUpdated = false;
            }
            gActiveEaselClient++;

            mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
            rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
            if (rc != OK) {
                // NOTE(review): returning here keeps gActiveEaselClient
                // incremented and never suspends Easel — verify intended.
                ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
                        strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                // Last client out suspends Easel again.
                if (gActiveEaselClient == 1) {
                    status_t suspendErr = gEaselManagerClient->suspend();
                    if (suspendErr != 0) {
                        ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                                strerror(-suspendErr), suspendErr);
                    }
                }
                gActiveEaselClient--;
            }

            // Tear down the listener thread started above.
            if (mQCamera3HdrPlusListenerThread != nullptr) {
                mQCamera3HdrPlusListenerThread->requestExit();
                mQCamera3HdrPlusListenerThread->join();
                mQCamera3HdrPlusListenerThread = nullptr;
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
             mCameraId, rc);

    // Transition to OPENED only on full success.
    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}
978  
979  /*===========================================================================
980   * FUNCTION   : openCamera
981   *
982   * DESCRIPTION: open camera
983   *
984   * PARAMETERS : none
985   *
986   * RETURN     : int32_t type of status
987   *              NO_ERROR  -- success
988   *              none-zero failure code
989   *==========================================================================*/
openCamera()990  int QCamera3HardwareInterface::openCamera()
991  {
992      int rc = 0;
993      char value[PROPERTY_VALUE_MAX];
994  
995      KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
996  
997      rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
998      if (rc < 0) {
999          LOGE("Failed to reserve flash for camera id: %d",
1000                  mCameraId);
1001          return UNKNOWN_ERROR;
1002      }
1003  
1004      rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
1005      if (rc) {
1006          LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
1007          return rc;
1008      }
1009  
1010      if (!mCameraHandle) {
1011          LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
1012          return -ENODEV;
1013      }
1014  
1015      rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
1016              camEvtHandle, (void *)this);
1017  
1018      if (rc < 0) {
1019          LOGE("Error, failed to register event callback");
1020          /* Not closing camera here since it is already handled in destructor */
1021          return FAILED_TRANSACTION;
1022      }
1023  
1024      mExifParams.debug_params =
1025              (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
1026      if (mExifParams.debug_params) {
1027          memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
1028      } else {
1029          LOGE("Out of Memory. Allocation failed for 3A debug exif params");
1030          return NO_MEMORY;
1031      }
1032      mFirstConfiguration = true;
1033  
1034      //Notify display HAL that a camera session is active.
1035      //But avoid calling the same during bootup because camera service might open/close
1036      //cameras at boot time during its initialization and display service will also internally
1037      //wait for camera service to initialize first while calling this display API, resulting in a
1038      //deadlock situation. Since boot time camera open/close calls are made only to fetch
1039      //capabilities, no need of this display bw optimization.
1040      //Use "service.bootanim.exit" property to know boot status.
1041      property_get("service.bootanim.exit", value, "0");
1042      if (atoi(value) == 1) {
1043          pthread_mutex_lock(&gCamLock);
1044          if (gNumCameraSessions++ == 0) {
1045              setCameraLaunchStatus(true);
1046          }
1047          pthread_mutex_unlock(&gCamLock);
1048      }
1049  
1050      //fill the session id needed while linking dual cam
1051      pthread_mutex_lock(&gCamLock);
1052      rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1053          &sessionId[mCameraId]);
1054      pthread_mutex_unlock(&gCamLock);
1055  
1056      if (rc < 0) {
1057          LOGE("Error, failed to get sessiion id");
1058          return UNKNOWN_ERROR;
1059      } else {
1060          //Allocate related cam sync buffer
1061          //this is needed for the payload that goes along with bundling cmd for related
1062          //camera use cases
1063          m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1064          rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
1065          if(rc != OK) {
1066              rc = NO_MEMORY;
1067              LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1068              return NO_MEMORY;
1069          }
1070  
1071          //Map memory for related cam sync buffer
1072          rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1073                  CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1074                  m_pDualCamCmdHeap->getFd(0),
1075                  sizeof(cam_dual_camera_cmd_info_t),
1076                  m_pDualCamCmdHeap->getPtr(0));
1077          if(rc < 0) {
1078              LOGE("Dualcam: failed to map Related cam sync buffer");
1079              rc = FAILED_TRANSACTION;
1080              return NO_MEMORY;
1081          }
1082          m_pDualCamCmdPtr =
1083                  (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
1084      }
1085  
1086      LOGH("mCameraId=%d",mCameraId);
1087  
1088      return NO_ERROR;
1089  }
1090  
1091  /*===========================================================================
1092   * FUNCTION   : closeCamera
1093   *
1094   * DESCRIPTION: close camera
1095   *
1096   * PARAMETERS : none
1097   *
1098   * RETURN     : int32_t type of status
1099   *              NO_ERROR  -- success
1100   *              none-zero failure code
1101   *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check —
    // assumes closeCamera() is only reached after a successful openCamera();
    // confirm all callers guarantee that.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Last session out clears the global camera-launch status.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            // Mirror of openCamera(): last Easel client suspends the device.
            if (gActiveEaselClient == 1) {
                rc = gEaselManagerClient->suspend();
                if (rc != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                }
            }
            gActiveEaselClient--;
        }

        // Stop the HDR+ listener thread started during open.
        if (mQCamera3HdrPlusListenerThread != nullptr) {
            mQCamera3HdrPlusListenerThread->requestExit();
            mQCamera3HdrPlusListenerThread->join();
            mQCamera3HdrPlusListenerThread = nullptr;
        }
    }

    return rc;
}
1174  
1175  /*===========================================================================
1176   * FUNCTION   : initialize
1177   *
1178   * DESCRIPTION: Initialize frameworks callback functions
1179   *
1180   * PARAMETERS :
1181   *   @callback_ops : callback function to frameworks
1182   *
1183   * RETURN     :
1184   *
1185   *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)1186  int QCamera3HardwareInterface::initialize(
1187          const struct camera3_callback_ops *callback_ops)
1188  {
1189      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
1190      int rc;
1191  
1192      LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1193      pthread_mutex_lock(&mMutex);
1194  
1195      // Validate current state
1196      switch (mState) {
1197          case OPENED:
1198              /* valid state */
1199              break;
1200          default:
1201              LOGE("Invalid state %d", mState);
1202              rc = -ENODEV;
1203              goto err1;
1204      }
1205  
1206      rc = initParameters();
1207      if (rc < 0) {
1208          LOGE("initParamters failed %d", rc);
1209          goto err1;
1210      }
1211      mCallbackOps = callback_ops;
1212  
1213      mChannelHandle = mCameraHandle->ops->add_channel(
1214              mCameraHandle->camera_handle, NULL, NULL, this);
1215      if (mChannelHandle == 0) {
1216          LOGE("add_channel failed");
1217          rc = -ENOMEM;
1218          pthread_mutex_unlock(&mMutex);
1219          return rc;
1220      }
1221  
1222      pthread_mutex_unlock(&mMutex);
1223      mCameraInitialized = true;
1224      mState = INITIALIZED;
1225      LOGI("X");
1226      return 0;
1227  
1228  err1:
1229      pthread_mutex_unlock(&mMutex);
1230      return rc;
1231  }
1232  
1233  /*===========================================================================
1234   * FUNCTION   : validateStreamDimensions
1235   *
1236   * DESCRIPTION: Check if the configuration requested are those advertised
1237   *
1238   * PARAMETERS :
1239   *   @stream_list : streams to be configured
1240   *
1241   * RETURN     :
1242   *
1243   *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    // PDAF depth dimensions come from the capability table when a PD stat
    // index was found at construction time.
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // At most one input stream is allowed per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the advertised tables are in sensor
        // orientation, so validate against the swapped dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW16 + DEPTH dataspace is the PDAF depth stream: it must
            // match the PD dimensions exactly.
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            // Regular RAW streams validate against the supported raw table.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // BLOB + DEPTH dataspace is a depth point cloud stream.
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL / input / bidirectional streams may match the full active
            // array size directly.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1380  
1381  /*===========================================================================
1382   * FUNCTION   : validateUsageFlags
1383   *
1384   * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1385   *
1386   * PARAMETERS :
1387   *   @stream_list : streams to be configured
1388   *
1389   * RETURN     :
1390   *   NO_ERROR if the usage flags are supported
1391   *   error code if usage flags are not supported
1392   *
1393   *==========================================================================*/
validateUsageFlags(const camera3_stream_configuration_t * streamList)1394  int QCamera3HardwareInterface::validateUsageFlags(
1395          const camera3_stream_configuration_t* streamList)
1396  {
1397      for (size_t j = 0; j < streamList->num_streams; j++) {
1398          const camera3_stream_t *newStream = streamList->streams[j];
1399  
1400          if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1401              (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1402               newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1403              continue;
1404          }
1405  
1406          // Here we only care whether it's EIS3 or not
1407          char is_type_value[PROPERTY_VALUE_MAX];
1408          property_get("persist.camera.is_type", is_type_value, "4");
1409          cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1410          if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1411                  mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1412              isType = IS_TYPE_NONE;
1413  
1414          bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1415          bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1416          bool isZSL = IS_USAGE_ZSL(newStream->usage);
1417          bool forcePreviewUBWC = true;
1418          if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1419              forcePreviewUBWC = false;
1420          }
1421          cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1422                  CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
1423          cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1424                  CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
1425          cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1426                  CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
1427  
1428          // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1429          // So color spaces will always match.
1430  
1431          // Check whether underlying formats of shared streams match.
1432          if (isVideo && isPreview && videoFormat != previewFormat) {
1433              LOGE("Combined video and preview usage flag is not supported");
1434              return -EINVAL;
1435          }
1436          if (isPreview && isZSL && previewFormat != zslFormat) {
1437              LOGE("Combined preview and zsl usage flag is not supported");
1438              return -EINVAL;
1439          }
1440          if (isVideo && isZSL && videoFormat != zslFormat) {
1441              LOGE("Combined video and zsl usage flag is not supported");
1442              return -EINVAL;
1443          }
1444      }
1445      return NO_ERROR;
1446  }
1447  
1448  /*===========================================================================
1449   * FUNCTION   : validateUsageFlagsForEis
1450   *
1451   * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1452   *
1453   * PARAMETERS :
1454   *   @stream_list : streams to be configured
1455   *
1456   * RETURN     :
1457   *   NO_ERROR if the usage flags are supported
1458   *   error code if usage flags are not supported
1459   *
1460   *==========================================================================*/
validateUsageFlagsForEis(const camera3_stream_configuration_t * streamList)1461  int QCamera3HardwareInterface::validateUsageFlagsForEis(
1462          const camera3_stream_configuration_t* streamList)
1463  {
1464      for (size_t j = 0; j < streamList->num_streams; j++) {
1465          const camera3_stream_t *newStream = streamList->streams[j];
1466  
1467          bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1468          bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1469  
1470          // Because EIS is "hard-coded" for certain use case, and current
1471         // implementation doesn't support shared preview and video on the same
1472          // stream, return failure if EIS is forced on.
1473          if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1474              LOGE("Combined video and preview usage flag is not supported due to EIS");
1475              return -EINVAL;
1476          }
1477      }
1478      return NO_ERROR;
1479  }
1480  
1481  /*==============================================================================
1482   * FUNCTION   : isSupportChannelNeeded
1483   *
1484   * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1485   *
1486   * PARAMETERS :
1487   *   @stream_list : streams to be configured
1488   *   @stream_config_info : the config info for streams to be configured
1489   *
 * RETURN     : Boolean true/false decision
1491   *
1492   *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1493  bool QCamera3HardwareInterface::isSupportChannelNeeded(
1494          camera3_stream_configuration_t *streamList,
1495          cam_stream_size_info_t stream_config_info)
1496  {
1497      uint32_t i;
1498      bool pprocRequested = false;
1499      /* Check for conditions where PProc pipeline does not have any streams*/
1500      for (i = 0; i < stream_config_info.num_streams; i++) {
1501          if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1502                  stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1503              pprocRequested = true;
1504              break;
1505          }
1506      }
1507  
1508      if (pprocRequested == false )
1509          return true;
1510  
1511      /* Dummy stream needed if only raw or jpeg streams present */
1512      for (i = 0; i < streamList->num_streams; i++) {
1513          switch(streamList->streams[i]->format) {
1514              case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1515              case HAL_PIXEL_FORMAT_RAW10:
1516              case HAL_PIXEL_FORMAT_RAW16:
1517              case HAL_PIXEL_FORMAT_BLOB:
1518                  break;
1519              default:
1520                  return false;
1521          }
1522      }
1523      return true;
1524  }
1525  
1526  /*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
1530   *
1531   * PARAMETERS :
1532   *   @sensor_mode_info : sensor mode information (output)
1533   *
1534   * RETURN     : int32_t type of status
1535   *              NO_ERROR  -- success
1536   *              none-zero failure code
1537   *
1538   *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // Compute the per-axis maximum over all configured stream sizes; the
    // sensor mode selected by the backend must cover the largest request.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    // Push the max dimension to the backend first so it can select the
    // matching sensor mode before we query the mode info back.
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Reuse the same parameter buffer for the query round-trip.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the backend's reply into the caller-provided output struct.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}
1585  
1586  /*==============================================================================
1587   * FUNCTION   : getCurrentSensorModeInfo
1588   *
1589   * DESCRIPTION: Get sensor mode information that is currently selected.
1590   *
1591   * PARAMETERS :
1592   *   @sensorModeInfo : sensor mode information (output)
1593   *
1594   * RETURN     : int32_t type of status
1595   *              NO_ERROR  -- success
1596   *              none-zero failure code
1597   *
1598   *==========================================================================*/
getCurrentSensorModeInfo(cam_sensor_mode_info_t & sensorModeInfo)1599  int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1600  {
1601      int32_t rc = NO_ERROR;
1602  
1603      clear_metadata_buffer(mParameters);
1604      ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1605  
1606      rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1607              mParameters);
1608      if (rc != NO_ERROR) {
1609          LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1610          return rc;
1611      }
1612  
1613      READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1614      LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1615              "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1616              sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1617              sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1618              sensorModeInfo.num_raw_bits);
1619  
1620      return rc;
1621  }
1622  
1623  /*==============================================================================
1624   * FUNCTION   : addToPPFeatureMask
1625   *
1626   * DESCRIPTION: add additional features to pp feature mask based on
1627   *              stream type and usecase
1628   *
1629   * PARAMETERS :
1630   *   @stream_format : stream type for feature mask
1631   *   @stream_idx : stream idx within postprocess_mask list to change
1632   *
1633   * RETURN     : NULL
1634   *
1635   *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to the SW TNR bit instead of 0.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept either a hex ("0x...") or a decimal property value.
    // NOTE(review): the sscanf formats assume cam_feature_mask_t is
    // long-long sized — confirm against the typedef if it ever changes.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    // Bail out (leaving the stream's mask untouched) on an unparsable value.
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD SeeMore when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added based on capability alone, not the
        // property-driven mask above.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        // Binning correction is only added for video sessions.
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        // Other stream formats get no extra pp features.
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1695  
1696  /*==============================================================================
1697   * FUNCTION   : updateFpsInPreviewBuffer
1698   *
1699   * DESCRIPTION: update FPS information in preview buffer.
1700   *
1701   * PARAMETERS :
1702   *   @metadata    : pointer to metadata buffer
1703   *   @frame_number: frame_number to look for in pending buffer list
1704   *
1705   * RETURN     : None
1706   *
1707   *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1708  void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1709          uint32_t frame_number)
1710  {
1711      // Mark all pending buffers for this particular request
1712      // with corresponding framerate information
1713      for (List<PendingBuffersInRequest>::iterator req =
1714              mPendingBuffersMap.mPendingBuffersInRequest.begin();
1715              req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1716          for(List<PendingBufferInfo>::iterator j =
1717                  req->mPendingBufferList.begin();
1718                  j != req->mPendingBufferList.end(); j++) {
1719              QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1720              if ((req->frame_number == frame_number) &&
1721                  (channel->getStreamTypeMask() &
1722                  (1U << CAM_STREAM_TYPE_PREVIEW))) {
1723                  IF_META_AVAILABLE(cam_fps_range_t, float_range,
1724                      CAM_INTF_PARM_FPS_RANGE, metadata) {
1725                      typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1726                      struct private_handle_t *priv_handle =
1727                          (struct private_handle_t *)(*(j->buffer));
1728                      setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1729                  }
1730              }
1731          }
1732      }
1733  }
1734  
1735  /*==============================================================================
1736   * FUNCTION   : updateTimeStampInPendingBuffers
1737   *
1738   * DESCRIPTION: update timestamp in display metadata for all pending buffers
1739   *              of a frame number
1740   *
1741   * PARAMETERS :
1742   *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1743   *   @timestamp   : timestamp to be set
1744   *
1745   * RETURN     : None
1746   *
1747   *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1748  void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1749          uint32_t frameNumber, nsecs_t timestamp)
1750  {
1751      for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1752              req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1753          // WAR: save the av_timestamp to the next frame
1754          if(req->frame_number == frameNumber + 1) {
1755              req->av_timestamp = timestamp;
1756          }
1757  
1758          if (req->frame_number != frameNumber)
1759              continue;
1760  
1761          for (auto k = req->mPendingBufferList.begin();
1762                  k != req->mPendingBufferList.end(); k++ ) {
1763              // WAR: update timestamp when it's not VT usecase
1764              QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1765              if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1766                  m_bAVTimerEnabled)) {
1767                      struct private_handle_t *priv_handle =
1768                          (struct private_handle_t *) (*(k->buffer));
1769                      setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1770              }
1771          }
1772      }
1773      return;
1774  }
1775  
1776  /*===========================================================================
1777   * FUNCTION   : configureStreams
1778   *
1779   * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1780   *              and output streams.
1781   *
1782   * PARAMETERS :
1783   *   @stream_list : streams to be configured
1784   *
1785   * RETURN     :
1786   *
1787   *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1788  int QCamera3HardwareInterface::configureStreams(
1789          camera3_stream_configuration_t *streamList)
1790  {
1791      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1792      int rc = 0;
1793  
1794      // Acquire perfLock before configure streams
1795      mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1796      rc = configureStreamsPerfLocked(streamList);
1797      mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1798  
1799      return rc;
1800  }
1801  
1802  /*===========================================================================
1803   * FUNCTION   : configureStreamsPerfLocked
1804   *
1805   * DESCRIPTION: configureStreams while perfLock is held.
1806   *
1807   * PARAMETERS :
1808   *   @stream_list : streams to be configured
1809   *
1810   * RETURN     : int32_t type of status
1811   *              NO_ERROR  -- success
1812   *              none-zero failure code
1813   *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1814  int QCamera3HardwareInterface::configureStreamsPerfLocked(
1815          camera3_stream_configuration_t *streamList)
1816  {
1817      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
1818      int rc = 0;
1819  
1820      // Sanity check stream_list
1821      if (streamList == NULL) {
1822          LOGE("NULL stream configuration");
1823          return BAD_VALUE;
1824      }
1825      if (streamList->streams == NULL) {
1826          LOGE("NULL stream list");
1827          return BAD_VALUE;
1828      }
1829  
1830      if (streamList->num_streams < 1) {
1831          LOGE("Bad number of streams requested: %d",
1832                  streamList->num_streams);
1833          return BAD_VALUE;
1834      }
1835  
1836      if (streamList->num_streams >= MAX_NUM_STREAMS) {
1837          LOGE("Maximum number of streams %d exceeded: %d",
1838                  MAX_NUM_STREAMS, streamList->num_streams);
1839          return BAD_VALUE;
1840      }
1841  
1842      mOpMode = streamList->operation_mode;
1843      LOGD("mOpMode: %d", mOpMode);
1844  
1845      rc = validateUsageFlags(streamList);
1846      if (rc != NO_ERROR) {
1847          return rc;
1848      }
1849  
1850      // Disable HDR+ if it's enabled;
1851      {
1852          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1853          finishHdrPlusClientOpeningLocked(l);
1854          disableHdrPlusModeLocked();
1855      }
1856  
1857      /* first invalidate all the steams in the mStreamList
1858       * if they appear again, they will be validated */
1859      for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1860              it != mStreamInfo.end(); it++) {
1861          QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1862          if (channel) {
1863            channel->stop();
1864          }
1865          (*it)->status = INVALID;
1866      }
1867  
1868      if (mRawDumpChannel) {
1869          mRawDumpChannel->stop();
1870          delete mRawDumpChannel;
1871          mRawDumpChannel = NULL;
1872      }
1873  
1874      if (mHdrPlusRawSrcChannel) {
1875          mHdrPlusRawSrcChannel->stop();
1876          delete mHdrPlusRawSrcChannel;
1877          mHdrPlusRawSrcChannel = NULL;
1878      }
1879  
1880      if (mSupportChannel)
1881          mSupportChannel->stop();
1882  
1883      if (mAnalysisChannel) {
1884          mAnalysisChannel->stop();
1885      }
1886      if (mMetadataChannel) {
1887          /* If content of mStreamInfo is not 0, there is metadata stream */
1888          mMetadataChannel->stop();
1889      }
1890      if (mChannelHandle) {
1891          stopChannelLocked(/*stop_immediately*/false);
1892      }
1893  
1894      pthread_mutex_lock(&mMutex);
1895  
1896      mPictureChannel = NULL;
1897  
1898      // Check state
1899      switch (mState) {
1900          case INITIALIZED:
1901          case CONFIGURED:
1902          case STARTED:
1903              /* valid state */
1904              break;
1905          default:
1906              LOGE("Invalid state %d", mState);
1907              pthread_mutex_unlock(&mMutex);
1908              return -ENODEV;
1909      }
1910  
1911      /* Check whether we have video stream */
1912      m_bIs4KVideo = false;
1913      m_bIsVideo = false;
1914      m_bEisSupportedSize = true;
1915      m_bTnrEnabled = false;
1916      m_bVideoHdrEnabled = false;
1917      bool isZsl = false;
1918      bool depthPresent = false;
1919      bool isPreview = false;
1920      uint32_t videoWidth = 0U;
1921      uint32_t videoHeight = 0U;
1922      size_t rawStreamCnt = 0;
1923      size_t stallStreamCnt = 0;
1924      size_t processedStreamCnt = 0;
1925      // Number of streams on ISP encoder path
1926      size_t numStreamsOnEncoder = 0;
1927      size_t numYuv888OnEncoder = 0;
1928      bool bYuv888OverrideJpeg = false;
1929      cam_dimension_t largeYuv888Size = {0, 0};
1930      cam_dimension_t maxViewfinderSize = {0, 0};
1931      bool bJpegExceeds4K = false;
1932      bool bJpegOnEncoder = false;
1933      bool bUseCommonFeatureMask = false;
1934      cam_feature_mask_t commonFeatureMask = 0;
1935      bool bSmallJpegSize = false;
1936      uint32_t width_ratio;
1937      uint32_t height_ratio;
1938      maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1939      camera3_stream_t *inputStream = NULL;
1940      bool isJpeg = false;
1941      cam_dimension_t jpegSize = {0, 0};
1942      cam_dimension_t previewSize = {0, 0};
1943      size_t pdStatCount = 0;
1944  
1945      cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1946  
1947      /*EIS configuration*/
1948      uint8_t eis_prop_set;
1949      uint32_t maxEisWidth = 0;
1950      uint32_t maxEisHeight = 0;
1951  
1952      // Initialize all instant AEC related variables
1953      mInstantAEC = false;
1954      mResetInstantAEC = false;
1955      mInstantAECSettledFrameNumber = 0;
1956      mAecSkipDisplayFrameBound = 0;
1957      mInstantAecFrameIdxCount = 0;
1958      mCurrFeatureState = 0;
1959      mStreamConfig = true;
1960  
1961      m_bAVTimerEnabled = false;
1962  
1963      memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1964  
1965      size_t count = IS_TYPE_MAX;
1966      count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1967      for (size_t i = 0; i < count; i++) {
1968          if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
1969              (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1970              m_bEisSupported = true;
1971              break;
1972          }
1973      }
1974  
1975      if (m_bEisSupported) {
1976          maxEisWidth = MAX_EIS_WIDTH;
1977          maxEisHeight = MAX_EIS_HEIGHT;
1978      }
1979  
1980      /* EIS setprop control */
1981      char eis_prop[PROPERTY_VALUE_MAX];
1982      memset(eis_prop, 0, sizeof(eis_prop));
1983      property_get("persist.camera.eis.enable", eis_prop, "1");
1984      eis_prop_set = (uint8_t)atoi(eis_prop);
1985  
1986      m_bEisEnable = eis_prop_set && m_bEisSupported &&
1987              (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1988              (gCamCapability[mCameraId]->position == CAM_POSITION_BACK ||
1989               gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX);
1990  
1991      LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1992              m_bEisEnable, eis_prop_set, m_bEisSupported);
1993  
1994      /* stream configurations */
1995      for (size_t i = 0; i < streamList->num_streams; i++) {
1996          camera3_stream_t *newStream = streamList->streams[i];
1997          LOGI("stream[%d] type = %d, format = %d, width = %d, "
1998                  "height = %d, rotation = %d, usage = 0x%x",
1999                   i, newStream->stream_type, newStream->format,
2000                  newStream->width, newStream->height, newStream->rotation,
2001                  newStream->usage);
2002          if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2003                  newStream->stream_type == CAMERA3_STREAM_INPUT){
2004              isZsl = true;
2005          }
2006          if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
2007                  IS_USAGE_PREVIEW(newStream->usage)) {
2008              isPreview = true;
2009          }
2010  
2011          if (newStream->stream_type == CAMERA3_STREAM_INPUT){
2012              inputStream = newStream;
2013          }
2014  
2015          if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
2016                  (newStream->data_space != HAL_DATASPACE_DEPTH)) {
2017              isJpeg = true;
2018              jpegSize.width = newStream->width;
2019              jpegSize.height = newStream->height;
2020              if (newStream->width > VIDEO_4K_WIDTH ||
2021                      newStream->height > VIDEO_4K_HEIGHT)
2022                  bJpegExceeds4K = true;
2023          }
2024  
2025          if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
2026                  (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
2027              if (IS_USAGE_VIDEO(newStream->usage)) {
2028                  m_bIsVideo = true;
2029                  // In HAL3 we can have multiple different video streams.
2030                  // The variables video width and height are used below as
2031                  // dimensions of the biggest of them
2032                  if (videoWidth < newStream->width || videoHeight < newStream->height) {
2033                      videoWidth = newStream->width;
2034                      videoHeight = newStream->height;
2035                  }
2036                  if ((VIDEO_4K_WIDTH <= newStream->width) &&
2037                          (VIDEO_4K_HEIGHT <= newStream->height)) {
2038                      m_bIs4KVideo = true;
2039                  }
2040              }
2041              m_bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
2042                                    (newStream->height <= maxEisHeight);
2043          }
2044          if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2045                  newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2046              switch (newStream->format) {
2047              case HAL_PIXEL_FORMAT_BLOB:
2048                  if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2049                      depthPresent = true;
2050                      break;
2051                  }
2052                  stallStreamCnt++;
2053                  if (isOnEncoder(maxViewfinderSize, newStream->width,
2054                          newStream->height)) {
2055                      numStreamsOnEncoder++;
2056                      bJpegOnEncoder = true;
2057                  }
2058                  width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2059                          newStream->width);
2060                  height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2061                          newStream->height);;
2062                  FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2063                          "FATAL: max_downscale_factor cannot be zero and so assert");
2064                  if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2065                      (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2066                      LOGH("Setting small jpeg size flag to true");
2067                      bSmallJpegSize = true;
2068                  }
2069                  break;
2070              case HAL_PIXEL_FORMAT_RAW10:
2071              case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2072              case HAL_PIXEL_FORMAT_RAW16:
2073                  rawStreamCnt++;
2074                  if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2075                          (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2076                      pdStatCount++;
2077                  }
2078                  break;
2079              case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2080                  processedStreamCnt++;
2081                  if (isOnEncoder(maxViewfinderSize, newStream->width,
2082                          newStream->height)) {
2083                      if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2084                              !IS_USAGE_ZSL(newStream->usage)) {
2085                          commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2086                      }
2087                      numStreamsOnEncoder++;
2088                  }
2089                  break;
2090              case HAL_PIXEL_FORMAT_YCbCr_420_888:
2091                  processedStreamCnt++;
2092                  if (isOnEncoder(maxViewfinderSize, newStream->width,
2093                          newStream->height)) {
2094                      // If Yuv888 size is not greater than 4K, set feature mask
2095                      // to SUPERSET so that it support concurrent request on
2096                      // YUV and JPEG.
2097                      if (newStream->width <= VIDEO_4K_WIDTH &&
2098                              newStream->height <= VIDEO_4K_HEIGHT) {
2099                          commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2100                      }
2101                      numStreamsOnEncoder++;
2102                      numYuv888OnEncoder++;
2103                      largeYuv888Size.width = newStream->width;
2104                      largeYuv888Size.height = newStream->height;
2105                  }
2106                  break;
2107              default:
2108                  processedStreamCnt++;
2109                  if (isOnEncoder(maxViewfinderSize, newStream->width,
2110                          newStream->height)) {
2111                      commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2112                      numStreamsOnEncoder++;
2113                  }
2114                  break;
2115              }
2116  
2117          }
2118      }
2119  
2120      if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2121          pthread_mutex_unlock(&mMutex);
2122          return -EINVAL;
2123      }
2124  
2125      uint8_t forceEnableTnr = 0;
2126      char tnr_prop[PROPERTY_VALUE_MAX];
2127      memset(tnr_prop, 0, sizeof(tnr_prop));
2128      property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2129      forceEnableTnr = (uint8_t)atoi(tnr_prop);
2130  
2131      /* Logic to enable/disable TNR based on specific config size/etc.*/
2132      if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
2133              (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2134          m_bTnrEnabled = true;
2135      else if (forceEnableTnr)
2136          m_bTnrEnabled = true;
2137  
2138      char videoHdrProp[PROPERTY_VALUE_MAX];
2139      memset(videoHdrProp, 0, sizeof(videoHdrProp));
2140      property_get("persist.camera.hdr.video", videoHdrProp, "0");
2141      uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2142  
2143      if (hdr_mode_prop == 1 && m_bIsVideo &&
2144              mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2145          m_bVideoHdrEnabled = true;
2146      else
2147          m_bVideoHdrEnabled = false;
2148  
2149  
2150      /* Check if num_streams is sane */
2151      if (stallStreamCnt > MAX_STALLING_STREAMS ||
2152              rawStreamCnt > MAX_RAW_STREAMS ||
2153              processedStreamCnt > MAX_PROCESSED_STREAMS) {
2154          LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2155                   stallStreamCnt, rawStreamCnt, processedStreamCnt);
2156          pthread_mutex_unlock(&mMutex);
2157          return -EINVAL;
2158      }
2159      /* Check whether we have zsl stream or 4k video case */
2160      if (isZsl && m_bIs4KVideo) {
2161          LOGE("Currently invalid configuration ZSL & 4K Video!");
2162          pthread_mutex_unlock(&mMutex);
2163          return -EINVAL;
2164      }
2165      /* Check if stream sizes are sane */
2166      if (numStreamsOnEncoder > 2) {
2167          LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2168          pthread_mutex_unlock(&mMutex);
2169          return -EINVAL;
2170      } else if (1 < numStreamsOnEncoder){
2171          bUseCommonFeatureMask = true;
2172          LOGH("Multiple streams above max viewfinder size, common mask needed");
2173      }
2174  
2175      /* Check if BLOB size is greater than 4k in 4k recording case */
2176      if (m_bIs4KVideo && bJpegExceeds4K) {
2177          LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2178          pthread_mutex_unlock(&mMutex);
2179          return -EINVAL;
2180      }
2181  
2182      if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2183              depthPresent) {
2184          LOGE("HAL doesn't support depth streams in HFR mode!");
2185          pthread_mutex_unlock(&mMutex);
2186          return -EINVAL;
2187      }
2188  
2189      // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2190      // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2191      // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2192      // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2193      // configurations:
2194      //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2195      //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2196      //    (These two configurations will not have CAC2 enabled even in HQ modes.)
2197      if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2198          ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2199                  __func__);
2200          pthread_mutex_unlock(&mMutex);
2201          return -EINVAL;
2202      }
2203  
2204      // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2205      // the YUV stream's size is greater or equal to the JPEG size, set common
2206      // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2207      if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2208              jpegSize.width, jpegSize.height) &&
2209              largeYuv888Size.width > jpegSize.width &&
2210              largeYuv888Size.height > jpegSize.height) {
2211          bYuv888OverrideJpeg = true;
2212      } else if (!isJpeg && numStreamsOnEncoder > 1) {
2213          commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2214      }
2215  
2216      LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2217              maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2218              commonFeatureMask);
2219      LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2220              numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2221  
2222      rc = validateStreamDimensions(streamList);
2223      if (rc == NO_ERROR) {
2224          rc = validateStreamRotations(streamList);
2225      }
2226      if (rc != NO_ERROR) {
2227          LOGE("Invalid stream configuration requested!");
2228          pthread_mutex_unlock(&mMutex);
2229          return rc;
2230      }
2231  
2232      if (1 < pdStatCount) {
2233          LOGE("HAL doesn't support multiple PD streams");
2234          pthread_mutex_unlock(&mMutex);
2235          return -EINVAL;
2236      }
2237  
2238      if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2239              (1 == pdStatCount)) {
2240          LOGE("HAL doesn't support PD streams in HFR mode!");
2241          pthread_mutex_unlock(&mMutex);
2242          return -EINVAL;
2243      }
2244  
2245      camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2246      for (size_t i = 0; i < streamList->num_streams; i++) {
2247          camera3_stream_t *newStream = streamList->streams[i];
2248          LOGH("newStream type = %d, stream format = %d "
2249                  "stream size : %d x %d, stream rotation = %d",
2250                   newStream->stream_type, newStream->format,
2251                  newStream->width, newStream->height, newStream->rotation);
2252          //if the stream is in the mStreamList validate it
2253          bool stream_exists = false;
2254          for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2255                  it != mStreamInfo.end(); it++) {
2256              if ((*it)->stream == newStream) {
2257                  QCamera3ProcessingChannel *channel =
2258                      (QCamera3ProcessingChannel*)(*it)->stream->priv;
2259                  stream_exists = true;
2260                  if (channel)
2261                      delete channel;
2262                  (*it)->status = VALID;
2263                  (*it)->stream->priv = NULL;
2264                  (*it)->channel = NULL;
2265              }
2266          }
2267          if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2268              //new stream
2269              stream_info_t* stream_info;
2270              stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2271              if (!stream_info) {
2272                 LOGE("Could not allocate stream info");
2273                 rc = -ENOMEM;
2274                 pthread_mutex_unlock(&mMutex);
2275                 return rc;
2276              }
2277              stream_info->stream = newStream;
2278              stream_info->status = VALID;
2279              stream_info->channel = NULL;
2280              stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
2281              mStreamInfo.push_back(stream_info);
2282          }
2283          /* Covers Opaque ZSL and API1 F/W ZSL */
2284          if (IS_USAGE_ZSL(newStream->usage)
2285                  || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2286              if (zslStream != NULL) {
2287                  LOGE("Multiple input/reprocess streams requested!");
2288                  pthread_mutex_unlock(&mMutex);
2289                  return BAD_VALUE;
2290              }
2291              zslStream = newStream;
2292          }
2293          /* Covers YUV reprocess */
2294          if (inputStream != NULL) {
2295              if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2296                      && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2297                      && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2298                      && inputStream->width == newStream->width
2299                      && inputStream->height == newStream->height) {
2300                  if (zslStream != NULL) {
2301                      /* This scenario indicates multiple YUV streams with same size
2302                       * as input stream have been requested, since zsl stream handle
2303                       * is solely use for the purpose of overriding the size of streams
2304                       * which share h/w streams we will just make a guess here as to
2305                       * which of the stream is a ZSL stream, this will be refactored
2306                       * once we make generic logic for streams sharing encoder output
2307                       */
2308                      LOGH("Warning, Multiple ip/reprocess streams requested!");
2309                  }
2310                  zslStream = newStream;
2311              }
2312          }
2313      }
2314  
2315      /* If a zsl stream is set, we know that we have configured at least one input or
2316         bidirectional stream */
2317      if (NULL != zslStream) {
2318          mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2319          mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2320          mInputStreamInfo.format = zslStream->format;
2321          mInputStreamInfo.usage = zslStream->usage;
2322          LOGD("Input stream configured! %d x %d, format %d, usage %d",
2323                   mInputStreamInfo.dim.width,
2324                  mInputStreamInfo.dim.height,
2325                  mInputStreamInfo.format, mInputStreamInfo.usage);
2326      }
2327  
2328      cleanAndSortStreamInfo();
2329      if (mMetadataChannel) {
2330          delete mMetadataChannel;
2331          mMetadataChannel = NULL;
2332      }
2333      if (mSupportChannel) {
2334          delete mSupportChannel;
2335          mSupportChannel = NULL;
2336      }
2337  
2338      if (mAnalysisChannel) {
2339          delete mAnalysisChannel;
2340          mAnalysisChannel = NULL;
2341      }
2342  
2343      if (mDummyBatchChannel) {
2344          delete mDummyBatchChannel;
2345          mDummyBatchChannel = NULL;
2346      }
2347  
2348      if (mDepthChannel) {
2349          mDepthChannel = NULL;
2350      }
2351      mDepthCloudMode = CAM_PD_DATA_SKIP;
2352  
2353      mShutterDispatcher.clear();
2354      mOutputBufferDispatcher.clear();
2355  
2356      char is_type_value[PROPERTY_VALUE_MAX];
2357      property_get("persist.camera.is_type", is_type_value, "4");
2358      m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2359  
2360      char property_value[PROPERTY_VALUE_MAX];
2361      property_get("persist.camera.gzoom.at", property_value, "0");
2362      int goog_zoom_at = atoi(property_value);
2363      bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2364          gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2365      bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2366          gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2367  
2368      property_get("persist.camera.gzoom.4k", property_value, "0");
2369      bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2370  
2371      //Create metadata channel and initialize it
2372      cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2373      setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2374              gCamCapability[mCameraId]->color_arrangement);
2375      mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2376                      mChannelHandle, mCameraHandle->ops, captureResultCb,
2377                      setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2378      if (mMetadataChannel == NULL) {
2379          LOGE("failed to allocate metadata channel");
2380          rc = -ENOMEM;
2381          pthread_mutex_unlock(&mMutex);
2382          return rc;
2383      }
2384      mMetadataChannel->enableDepthData(depthPresent);
2385      rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2386      if (rc < 0) {
2387          LOGE("metadata channel initialization failed");
2388          delete mMetadataChannel;
2389          mMetadataChannel = NULL;
2390          pthread_mutex_unlock(&mMutex);
2391          return rc;
2392      }
2393  
2394      cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2395      bool isRawStreamRequested = false;
2396      bool onlyRaw = true;
2397      // Keep track of preview/video streams indices.
2398      // There could be more than one preview streams, but only one video stream.
2399      int32_t video_stream_idx = -1;
2400      int32_t preview_stream_idx[streamList->num_streams];
2401      size_t preview_stream_cnt = 0;
2402      bool previewTnr[streamList->num_streams];
2403      memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2404      bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2405      // Loop through once to determine preview TNR conditions before creating channels.
2406      for (size_t i = 0; i < streamList->num_streams; i++) {
2407          camera3_stream_t *newStream = streamList->streams[i];
2408          uint32_t stream_usage = newStream->usage;
2409          if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2410                  newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2411              if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2412                  video_stream_idx = (int32_t)i;
2413              else
2414                  preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2415          }
2416      }
2417      // By default, preview stream TNR is disabled.
2418      // Enable TNR to the preview stream if all conditions below are satisfied:
2419      //  1. preview resolution == video resolution.
2420      //  2. video stream TNR is enabled.
2421      //  3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2422      for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2423          camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2424          camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2425          if (m_bTnrEnabled && m_bTnrVideo &&
2426                  (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2427                  video_stream->width == preview_stream->width &&
2428                  video_stream->height == preview_stream->height) {
2429              previewTnr[preview_stream_idx[i]] = true;
2430          }
2431      }
2432  
2433      memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2434      /* Allocate channel objects for the requested streams */
2435      for (size_t i = 0; i < streamList->num_streams; i++) {
2436  
2437          camera3_stream_t *newStream = streamList->streams[i];
2438          uint32_t stream_usage = newStream->usage;
2439          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2440          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2441          struct camera_info *p_info = NULL;
2442          pthread_mutex_lock(&gCamLock);
2443          p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2444          pthread_mutex_unlock(&gCamLock);
2445          if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2446                  || IS_USAGE_ZSL(newStream->usage)) &&
2447              newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2448              onlyRaw = false; // There is non-raw stream - bypass flag if set
2449              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2450              if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2451                  if (bUseCommonFeatureMask)
2452                      zsl_ppmask = commonFeatureMask;
2453                  else
2454                      zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2455              } else {
2456                  if (numStreamsOnEncoder > 0)
2457                      zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2458                  else
2459                      zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2460              }
2461              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2462          } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2463              onlyRaw = false; // There is non-raw stream - bypass flag if set
2464                  LOGH("Input stream configured, reprocess config");
2465          } else {
2466              //for non zsl streams find out the format
2467              switch (newStream->format) {
2468              case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2469              {
2470                  onlyRaw = false; // There is non-raw stream - bypass flag if set
2471                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2472                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2473                  /* add additional features to pp feature mask */
2474                  addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2475                          mStreamConfigInfo.num_streams);
2476  
2477                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2478                          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2479                                  CAM_STREAM_TYPE_VIDEO;
2480                      if (m_bTnrEnabled && m_bTnrVideo) {
2481                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2482                              CAM_QCOM_FEATURE_CPP_TNR;
2483                          //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2484                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2485                                  ~CAM_QCOM_FEATURE_CDS;
2486                      }
2487                      if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2488                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2489                              CAM_QTI_FEATURE_PPEISCORE;
2490                      }
2491                      if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2492                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2493                              CAM_QCOM_FEATURE_GOOG_ZOOM;
2494                      }
2495                  } else {
2496                          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2497                              CAM_STREAM_TYPE_PREVIEW;
2498                      if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
2499                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2500                                  CAM_QCOM_FEATURE_CPP_TNR;
2501                          //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2502                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2503                                  ~CAM_QCOM_FEATURE_CDS;
2504                      }
2505                      if(!m_bSwTnrPreview) {
2506                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2507                                  ~CAM_QTI_FEATURE_SW_TNR;
2508                      }
2509                      if (is_goog_zoom_preview_enabled) {
2510                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2511                              CAM_QCOM_FEATURE_GOOG_ZOOM;
2512                      }
2513                      padding_info.width_padding = mSurfaceStridePadding;
2514                      padding_info.height_padding = CAM_PAD_TO_2;
2515                      previewSize.width = (int32_t)newStream->width;
2516                      previewSize.height = (int32_t)newStream->height;
2517                  }
2518                  if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2519                          (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2520                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2521                              newStream->height;
2522                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2523                              newStream->width;
2524                  }
2525              }
2526              break;
2527              case HAL_PIXEL_FORMAT_YCbCr_420_888:
2528                  onlyRaw = false; // There is non-raw stream - bypass flag if set
2529                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2530                  if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2531                      if (bUseCommonFeatureMask)
2532                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2533                                  commonFeatureMask;
2534                      else
2535                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2536                                  CAM_QCOM_FEATURE_NONE;
2537                  } else {
2538                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2539                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2540                  }
2541              break;
2542              case HAL_PIXEL_FORMAT_BLOB:
2543                  onlyRaw = false; // There is non-raw stream - bypass flag if set
2544                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2545                  // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2546                  if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2547                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2548                               CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2549                       /* Remove rotation if it is not supported
2550                          for 4K LiveVideo snapshot case (online processing) */
2551                       if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2552                                  CAM_QCOM_FEATURE_ROTATION)) {
2553                           mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2554                                   &= ~CAM_QCOM_FEATURE_ROTATION;
2555                       }
2556                  } else {
2557                      if (bUseCommonFeatureMask &&
2558                              isOnEncoder(maxViewfinderSize, newStream->width,
2559                              newStream->height)) {
2560                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2561                      } else {
2562                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2563                      }
2564                  }
2565                  if (isZsl) {
2566                      if (zslStream) {
2567                          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2568                                  (int32_t)zslStream->width;
2569                          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2570                                  (int32_t)zslStream->height;
2571                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2572                                  zsl_ppmask;
2573                      } else {
2574                          LOGE("Error, No ZSL stream identified");
2575                          pthread_mutex_unlock(&mMutex);
2576                          return -EINVAL;
2577                      }
2578                  } else if (m_bIs4KVideo) {
2579                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2580                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2581                  } else if (bYuv888OverrideJpeg) {
2582                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2583                              (int32_t)largeYuv888Size.width;
2584                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2585                              (int32_t)largeYuv888Size.height;
2586                  }
2587                  break;
2588              case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2589              case HAL_PIXEL_FORMAT_RAW16:
2590              case HAL_PIXEL_FORMAT_RAW10:
2591                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2592                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2593                  isRawStreamRequested = true;
2594                  if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2595                          (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2596                      mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2597                              gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2598                      mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2599                              gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2600                      mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2601                              gCamCapability[mCameraId]->dt[mPDIndex];
2602                      mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2603                              gCamCapability[mCameraId]->vc[mPDIndex];
2604                  }
2605                  break;
2606              default:
2607                  onlyRaw = false; // There is non-raw stream - bypass flag if set
2608                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2609                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2610                  break;
2611              }
2612          }
2613  
2614          setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2615                  (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2616                  gCamCapability[mCameraId]->color_arrangement);
2617  
2618          if (newStream->priv == NULL) {
2619              //New stream, construct channel
2620              switch (newStream->stream_type) {
2621              case CAMERA3_STREAM_INPUT:
2622                  newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2623                  newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2624                  break;
2625              case CAMERA3_STREAM_BIDIRECTIONAL:
2626                  newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2627                      GRALLOC_USAGE_HW_CAMERA_WRITE;
2628                  break;
2629              case CAMERA3_STREAM_OUTPUT:
2630                  /* For video encoding stream, set read/write rarely
2631                   * flag so that they may be set to un-cached */
2632                  if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2633                      newStream->usage |=
2634                           (GRALLOC_USAGE_SW_READ_RARELY |
2635                           GRALLOC_USAGE_SW_WRITE_RARELY |
2636                           GRALLOC_USAGE_HW_CAMERA_WRITE);
2637                  else if (IS_USAGE_ZSL(newStream->usage))
2638                  {
2639                      LOGD("ZSL usage flag skipping");
2640                  }
2641                  else if (newStream == zslStream
2642                          || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2643                      newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2644                  } else
2645                      newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2646                  break;
2647              default:
2648                  LOGE("Invalid stream_type %d", newStream->stream_type);
2649                  break;
2650              }
2651  
2652              bool forcePreviewUBWC = true;
2653              if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2654                      newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2655                  QCamera3ProcessingChannel *channel = NULL;
2656                  switch (newStream->format) {
2657                  case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2658                      if ((newStream->usage &
2659                              private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2660                              (streamList->operation_mode ==
2661                              CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2662                      ) {
2663                          channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2664                                  mChannelHandle, mCameraHandle->ops, captureResultCb,
2665                                  setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2666                                  this,
2667                                  newStream,
2668                                  (cam_stream_type_t)
2669                                          mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2670                                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2671                                  mMetadataChannel,
2672                                  0); //heap buffers are not required for HFR video channel
2673                          if (channel == NULL) {
2674                              LOGE("allocation of channel failed");
2675                              pthread_mutex_unlock(&mMutex);
2676                              return -ENOMEM;
2677                          }
2678                          //channel->getNumBuffers() will return 0 here so use
2679                          //MAX_INFLIGH_HFR_REQUESTS
2680                          newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2681                          newStream->priv = channel;
2682                          LOGI("num video buffers in HFR mode: %d",
2683                                   MAX_INFLIGHT_HFR_REQUESTS);
2684                      } else {
2685                          /* Copy stream contents in HFR preview only case to create
2686                           * dummy batch channel so that sensor streaming is in
2687                           * HFR mode */
2688                          if (!m_bIsVideo && (streamList->operation_mode ==
2689                                  CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2690                              mDummyBatchStream = *newStream;
2691                              mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
2692                          }
2693                          int bufferCount = MAX_INFLIGHT_REQUESTS;
2694                          if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2695                                  CAM_STREAM_TYPE_VIDEO) {
2696                              if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2697                                  // WAR: 4K video can only run <=30fps, reduce the buffer count.
2698                                  bufferCount = m_bIs4KVideo ?
2699                                      MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2700                              }
2701  
2702                          }
2703                          channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2704                                  mChannelHandle, mCameraHandle->ops, captureResultCb,
2705                                  setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2706                                  this,
2707                                  newStream,
2708                                  (cam_stream_type_t)
2709                                          mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710                                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711                                  mMetadataChannel,
2712                                  bufferCount);
2713                          if (channel == NULL) {
2714                              LOGE("allocation of channel failed");
2715                              pthread_mutex_unlock(&mMutex);
2716                              return -ENOMEM;
2717                          }
2718                          /* disable UBWC for preview, though supported,
2719                           * to take advantage of CPP duplication */
2720                          if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2721                                  (previewSize.width == (int32_t)videoWidth)&&
2722                                  (previewSize.height == (int32_t)videoHeight)){
2723                              forcePreviewUBWC = false;
2724                          }
2725                          channel->setUBWCEnabled(forcePreviewUBWC);
2726                           /* When goog_zoom is linked to the preview or video stream,
2727                            * disable ubwc to the linked stream */
2728                          if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2729                                  CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2730                              channel->setUBWCEnabled(false);
2731                          }
2732                          newStream->max_buffers = channel->getNumBuffers();
2733                          newStream->priv = channel;
2734                      }
2735                      break;
2736                  case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2737                      channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2738                              mChannelHandle,
2739                              mCameraHandle->ops, captureResultCb,
2740                              setBufferErrorStatus, &padding_info,
2741                              this,
2742                              newStream,
2743                              (cam_stream_type_t)
2744                                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2745                              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2746                              mMetadataChannel);
2747                      if (channel == NULL) {
2748                          LOGE("allocation of YUV channel failed");
2749                          pthread_mutex_unlock(&mMutex);
2750                          return -ENOMEM;
2751                      }
2752                      newStream->max_buffers = channel->getNumBuffers();
2753                      newStream->priv = channel;
2754                      break;
2755                  }
2756                  case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2757                  case HAL_PIXEL_FORMAT_RAW16:
2758                  case HAL_PIXEL_FORMAT_RAW10: {
2759                      bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2760                              (HAL_DATASPACE_DEPTH != newStream->data_space))
2761                              ? true : false;
2762                      mRawChannel = new QCamera3RawChannel(
2763                              mCameraHandle->camera_handle, mChannelHandle,
2764                              mCameraHandle->ops, captureResultCb,
2765                              setBufferErrorStatus, &padding_info,
2766                              this, newStream,
2767                              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2768                              mMetadataChannel, isRAW16);
2769                      if (mRawChannel == NULL) {
2770                          LOGE("allocation of raw channel failed");
2771                          pthread_mutex_unlock(&mMutex);
2772                          return -ENOMEM;
2773                      }
2774                      newStream->max_buffers = mRawChannel->getNumBuffers();
2775                      newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2776                      break;
2777                  }
2778                  case HAL_PIXEL_FORMAT_BLOB:
2779                      if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2780                          mDepthChannel = new QCamera3DepthChannel(
2781                                  mCameraHandle->camera_handle, mChannelHandle,
2782                                  mCameraHandle->ops, NULL, NULL, &padding_info,
2783                                  0, this, MAX_INFLIGHT_REQUESTS, newStream,
2784                                  mMetadataChannel);
2785                          if (NULL == mDepthChannel) {
2786                              LOGE("Allocation of depth channel failed");
2787                              pthread_mutex_unlock(&mMutex);
2788                              return NO_MEMORY;
2789                          }
2790                          newStream->priv = mDepthChannel;
2791                          newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2792                      } else {
2793                          // Max live snapshot inflight buffer is 1. This is to mitigate
2794                          // frame drop issues for video snapshot. The more buffers being
2795                          // allocated, the more frame drops there are.
2796                          mPictureChannel = new QCamera3PicChannel(
2797                                  mCameraHandle->camera_handle, mChannelHandle,
2798                                  mCameraHandle->ops, captureResultCb,
2799                                  setBufferErrorStatus, &padding_info, this, newStream,
2800                                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2801                                  m_bIs4KVideo, isZsl, mMetadataChannel,
2802                                  (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2803                          if (mPictureChannel == NULL) {
2804                              LOGE("allocation of channel failed");
2805                              pthread_mutex_unlock(&mMutex);
2806                              return -ENOMEM;
2807                          }
2808                          newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2809                          newStream->max_buffers = mPictureChannel->getNumBuffers();
2810                          mPictureChannel->overrideYuvSize(
2811                                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2812                                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2813                      }
2814                      break;
2815  
2816                  default:
2817                      LOGE("not a supported format 0x%x", newStream->format);
2818                      pthread_mutex_unlock(&mMutex);
2819                      return -EINVAL;
2820                  }
2821              } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2822                  newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2823              } else {
2824                  LOGE("Error, Unknown stream type");
2825                  pthread_mutex_unlock(&mMutex);
2826                  return -EINVAL;
2827              }
2828  
2829              QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2830              if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2831                  // Here we only care whether it's EIS3 or not
2832                  cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2833                  if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2834                          mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2835                      isType = IS_TYPE_NONE;
2836                  cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2837                          mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2838                          newStream->width, newStream->height, forcePreviewUBWC, isType);
2839                  if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2840                      newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2841                  }
2842              }
2843  
2844              for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2845                      it != mStreamInfo.end(); it++) {
2846                  if ((*it)->stream == newStream) {
2847                      (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2848                      break;
2849                  }
2850              }
2851          } else {
2852              // Channel already exists for this stream
2853              // Do nothing for now
2854          }
2855          padding_info = gCamCapability[mCameraId]->padding_info;
2856  
2857          /* Do not add entries for input&depth stream in metastream info
2858           * since there is no real stream associated with it
2859           */
2860          if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2861                  !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2862                          (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2863              mStreamConfigInfo.num_streams++;
2864          }
2865      }
2866  
2867      // Let buffer dispatcher know the configured streams.
2868      mOutputBufferDispatcher.configureStreams(streamList);
2869  
2870      if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2871          onlyRaw = false;
2872      }
2873  
2874      // Create analysis stream all the time, even when h/w support is not available
2875      if (!onlyRaw) {
2876          cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2877          cam_analysis_info_t analysisInfo;
2878          int32_t ret = NO_ERROR;
2879          ret = mCommon.getAnalysisInfo(
2880                  FALSE,
2881                  analysisFeatureMask,
2882                  &analysisInfo);
2883          if (ret == NO_ERROR) {
2884              cam_color_filter_arrangement_t analysis_color_arrangement =
2885                      (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2886                      CAM_FILTER_ARRANGEMENT_Y :
2887                      gCamCapability[mCameraId]->color_arrangement);
2888              setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2889                                                 analysis_color_arrangement);
2890              cam_dimension_t analysisDim;
2891              analysisDim = mCommon.getMatchingDimension(previewSize,
2892                      analysisInfo.analysis_recommended_res);
2893  
2894              mAnalysisChannel = new QCamera3SupportChannel(
2895                      mCameraHandle->camera_handle,
2896                      mChannelHandle,
2897                      mCameraHandle->ops,
2898                      &analysisInfo.analysis_padding_info,
2899                      analysisFeatureMask,
2900                      CAM_STREAM_TYPE_ANALYSIS,
2901                      &analysisDim,
2902                      (analysisInfo.analysis_format
2903                      == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2904                      : CAM_FORMAT_YUV_420_NV21),
2905                      analysisInfo.hw_analysis_supported,
2906                      gCamCapability[mCameraId]->color_arrangement,
2907                      this,
2908                      0); // force buffer count to 0
2909          } else {
2910              LOGW("getAnalysisInfo failed, ret = %d", ret);
2911          }
2912          if (!mAnalysisChannel) {
2913              LOGW("Analysis channel cannot be created");
2914          }
2915      }
2916  
2917      //RAW DUMP channel
2918      if (mEnableRawDump && isRawStreamRequested == false){
2919          cam_dimension_t rawDumpSize;
2920          rawDumpSize = getMaxRawSize(mCameraId);
2921          cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2922          setPAAFSupport(rawDumpFeatureMask,
2923                  CAM_STREAM_TYPE_RAW,
2924                  gCamCapability[mCameraId]->color_arrangement);
2925          mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2926                                    mChannelHandle,
2927                                    mCameraHandle->ops,
2928                                    rawDumpSize,
2929                                    &padding_info,
2930                                    this, rawDumpFeatureMask);
2931          if (!mRawDumpChannel) {
2932              LOGE("Raw Dump channel cannot be created");
2933              pthread_mutex_unlock(&mMutex);
2934              return -ENOMEM;
2935          }
2936      }
2937  
2938      if (mAnalysisChannel) {
2939          cam_analysis_info_t analysisInfo;
2940          memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2941          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2942                  CAM_STREAM_TYPE_ANALYSIS;
2943          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2944                  CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2945          rc = mCommon.getAnalysisInfo(FALSE,
2946                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2947                  &analysisInfo);
2948          if (rc != NO_ERROR) {
2949              LOGE("getAnalysisInfo failed, ret = %d", rc);
2950              pthread_mutex_unlock(&mMutex);
2951              return rc;
2952          }
2953          cam_color_filter_arrangement_t analysis_color_arrangement =
2954                  (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2955                  CAM_FILTER_ARRANGEMENT_Y :
2956                  gCamCapability[mCameraId]->color_arrangement);
2957          setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2958                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2959                  analysis_color_arrangement);
2960  
2961          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2962                  mCommon.getMatchingDimension(previewSize,
2963                  analysisInfo.analysis_recommended_res);
2964          mStreamConfigInfo.num_streams++;
2965      }
2966  
2967      if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2968          cam_analysis_info_t supportInfo;
2969          memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2970          cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2971          setPAAFSupport(callbackFeatureMask,
2972                  CAM_STREAM_TYPE_CALLBACK,
2973                  gCamCapability[mCameraId]->color_arrangement);
2974          int32_t ret = NO_ERROR;
2975          ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
2976          if (ret != NO_ERROR) {
2977              /* Ignore the error for Mono camera
2978               * because the PAAF bit mask is only set
2979               * for CAM_STREAM_TYPE_ANALYSIS stream type
2980               */
2981              if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2982                  LOGW("getAnalysisInfo failed, ret = %d", ret);
2983              }
2984          }
2985          mSupportChannel = new QCamera3SupportChannel(
2986                  mCameraHandle->camera_handle,
2987                  mChannelHandle,
2988                  mCameraHandle->ops,
2989                  &gCamCapability[mCameraId]->padding_info,
2990                  callbackFeatureMask,
2991                  CAM_STREAM_TYPE_CALLBACK,
2992                  &QCamera3SupportChannel::kDim,
2993                  CAM_FORMAT_YUV_420_NV21,
2994                  supportInfo.hw_analysis_supported,
2995                  gCamCapability[mCameraId]->color_arrangement,
2996                  this, 0);
2997          if (!mSupportChannel) {
2998              LOGE("dummy channel cannot be created");
2999              pthread_mutex_unlock(&mMutex);
3000              return -ENOMEM;
3001          }
3002      }
3003  
3004      if (mSupportChannel) {
3005          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3006                  QCamera3SupportChannel::kDim;
3007          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3008                  CAM_STREAM_TYPE_CALLBACK;
3009          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3010                  CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3011          setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3012                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3013                  gCamCapability[mCameraId]->color_arrangement);
3014          mStreamConfigInfo.num_streams++;
3015      }
3016  
3017      if (mRawDumpChannel) {
3018          cam_dimension_t rawSize;
3019          rawSize = getMaxRawSize(mCameraId);
3020          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3021                  rawSize;
3022          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3023                  CAM_STREAM_TYPE_RAW;
3024          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3025                  CAM_QCOM_FEATURE_NONE;
3026          setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3027                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3028                  gCamCapability[mCameraId]->color_arrangement);
3029          mStreamConfigInfo.num_streams++;
3030      }
3031  
3032      if (mHdrPlusRawSrcChannel) {
3033          cam_dimension_t rawSize;
3034          rawSize = getMaxRawSize(mCameraId);
3035          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3036          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3037          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3038          setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3039                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3040                  gCamCapability[mCameraId]->color_arrangement);
3041          mStreamConfigInfo.num_streams++;
3042      }
3043  
3044      /* In HFR mode, if video stream is not added, create a dummy channel so that
3045       * ISP can create a batch mode even for preview only case. This channel is
3046       * never 'start'ed (no stream-on), it is only 'initialized'  */
3047      if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3048              !m_bIsVideo) {
3049          cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3050          setPAAFSupport(dummyFeatureMask,
3051                  CAM_STREAM_TYPE_VIDEO,
3052                  gCamCapability[mCameraId]->color_arrangement);
3053          mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3054                  mChannelHandle,
3055                  mCameraHandle->ops, captureResultCb,
3056                  setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
3057                  this,
3058                  &mDummyBatchStream,
3059                  CAM_STREAM_TYPE_VIDEO,
3060                  dummyFeatureMask,
3061                  mMetadataChannel);
3062          if (NULL == mDummyBatchChannel) {
3063              LOGE("creation of mDummyBatchChannel failed."
3064                      "Preview will use non-hfr sensor mode ");
3065          }
3066      }
3067      if (mDummyBatchChannel) {
3068          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3069                  mDummyBatchStream.width;
3070          mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3071                  mDummyBatchStream.height;
3072          mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3073                  CAM_STREAM_TYPE_VIDEO;
3074          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3075                  CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3076          setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3077                  mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3078                  gCamCapability[mCameraId]->color_arrangement);
3079          mStreamConfigInfo.num_streams++;
3080      }
3081  
3082      mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3083      mStreamConfigInfo.buffer_info.max_buffers =
3084              m_bIs4KVideo ? 0 :
3085              m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3086  
3087      /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3088      for (pendingRequestIterator i = mPendingRequestsList.begin();
3089              i != mPendingRequestsList.end();) {
3090          i = erasePendingRequest(i);
3091      }
3092      mPendingFrameDropList.clear();
3093      // Initialize/Reset the pending buffers list
3094      for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3095          req.mPendingBufferList.clear();
3096      }
3097      mPendingBuffersMap.mPendingBuffersInRequest.clear();
3098      mExpectedInflightDuration = 0;
3099      mExpectedFrameDuration = 0;
3100  
3101      mCurJpegMeta.clear();
3102      //Get min frame duration for this streams configuration
3103      deriveMinFrameDuration();
3104  
3105      mFirstPreviewIntentSeen = false;
3106  
3107      // Update state
3108      mState = CONFIGURED;
3109  
3110      mFirstMetadataCallback = true;
3111  
3112      if (streamList->session_parameters != nullptr) {
3113          CameraMetadata meta;
3114          meta = streamList->session_parameters;
3115  
3116          // send an unconfigure to the backend so that the isp
3117          // resources are deallocated
3118          if (!mFirstConfiguration) {
3119              cam_stream_size_info_t stream_config_info;
3120              int32_t hal_version = CAM_HAL_V3;
3121              memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3122              stream_config_info.buffer_info.min_buffers =
3123                      MIN_INFLIGHT_REQUESTS;
3124              stream_config_info.buffer_info.max_buffers =
3125                      m_bIs4KVideo ? 0 :
3126                      m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3127              clear_metadata_buffer(mParameters);
3128              ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3129                      CAM_INTF_PARM_HAL_VERSION, hal_version);
3130              ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3131                      CAM_INTF_META_STREAM_INFO, stream_config_info);
3132              rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3133                      mParameters);
3134              if (rc < 0) {
3135                  LOGE("set_parms for unconfigure failed");
3136                  pthread_mutex_unlock(&mMutex);
3137                  return rc;
3138              }
3139  
3140          }
3141          /* get eis information for stream configuration */
3142          cam_is_type_t isTypePreview, is_type=IS_TYPE_NONE;
3143          char is_type_value[PROPERTY_VALUE_MAX];
3144          property_get("persist.camera.is_type", is_type_value, "4");
3145          m_ISTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
3146          // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
3147          property_get("persist.camera.is_type_preview", is_type_value, "4");
3148          isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
3149          LOGD("isTypeVideo: %d isTypePreview: %d", m_ISTypeVideo, isTypePreview);
3150  
3151          int32_t hal_version = CAM_HAL_V3;
3152          clear_metadata_buffer(mParameters);
3153          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3154          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);
3155  
3156          if (mFirstConfiguration) {
3157              // configure instant AEC
3158              // Instant AEC is a session based parameter and it is needed only
3159              // once per complete session after open camera.
3160              // i.e. This is set only once for the first capture request, after open camera.
3161              setInstantAEC(meta);
3162          }
3163  
3164          bool setEis = isEISEnabled(meta);
3165          int32_t vsMode;
3166          vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3167          if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3168              rc = BAD_VALUE;
3169          }
3170          LOGD("setEis %d", setEis);
3171          bool eis3Supported = false;
3172          size_t count = IS_TYPE_MAX;
3173          count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
3174          for (size_t i = 0; i < count; i++) {
3175              if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
3176                  eis3Supported = true;
3177                  break;
3178              }
3179          }
3180  
3181          //IS type will be 0 unless EIS is supported. If EIS is supported
3182          //it could either be 4 or 5 depending on the stream and video size
3183          for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3184              if (setEis) {
3185                  if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
3186                      is_type = isTypePreview;
3187                  } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
3188                      if ( (m_ISTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
3189                          LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
3190                          is_type = IS_TYPE_EIS_2_0;
3191                      } else {
3192                          is_type = m_ISTypeVideo;
3193                      }
3194                  } else {
3195                      is_type = IS_TYPE_NONE;
3196                  }
3197                   mStreamConfigInfo.is_type[i] = is_type;
3198              } else {
3199                   mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
3200              }
3201          }
3202  
3203          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3204                  CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3205  
3206          char prop[PROPERTY_VALUE_MAX];
3207          //Disable tintless only if the property is set to 0
3208          memset(prop, 0, sizeof(prop));
3209          property_get("persist.camera.tintless.enable", prop, "1");
3210          int32_t tintless_value = atoi(prop);
3211  
3212          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3213                  CAM_INTF_PARM_TINTLESS, tintless_value);
3214  
3215          //Disable CDS for HFR mode or if DIS/EIS is on.
3216          //CDS is a session parameter in the backend/ISP, so need to be set/reset
3217          //after every configure_stream
3218          if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3219                  (m_bIsVideo)) {
3220              int32_t cds = CAM_CDS_MODE_OFF;
3221              if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3222                      CAM_INTF_PARM_CDS_MODE, cds))
3223                  LOGE("Failed to disable CDS for HFR mode");
3224  
3225          }
3226  
3227          if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3228              uint8_t* use_av_timer = NULL;
3229  
3230              if (m_debug_avtimer){
3231                  LOGI(" Enabling AV timer through setprop");
3232                  use_av_timer = &m_debug_avtimer;
3233                  m_bAVTimerEnabled = true;
3234              }
3235              else{
3236                  use_av_timer =
3237                      meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3238                  if (use_av_timer) {
3239                      m_bAVTimerEnabled = true;
3240                      LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
3241                  }
3242              }
3243  
3244              if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3245                  rc = BAD_VALUE;
3246              }
3247          }
3248  
3249          setMobicat();
3250  
3251          /* Set fps and hfr mode while sending meta stream info so that sensor
3252           * can configure appropriate streaming mode */
3253          mHFRVideoFps = DEFAULT_VIDEO_FPS;
3254          mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3255          mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3256          if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3257              rc = setHalFpsRange(meta, mParameters);
3258              if (rc == NO_ERROR) {
3259                  int32_t max_fps =
3260                      (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3261                  if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3262                      mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3263                  }
3264                  /* For HFR, more buffers are dequeued upfront to improve the performance */
3265                  if (mBatchSize) {
3266                      mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3267                      mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3268                  }
3269              }
3270              else {
3271                  LOGE("setHalFpsRange failed");
3272              }
3273          }
3274          memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3275  
3276          if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
3277              cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
3278                      meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
3279              rc = setVideoHdrMode(mParameters, vhdr);
3280              if (rc != NO_ERROR) {
3281                  LOGE("setVideoHDR is failed");
3282              }
3283          }
3284  
3285          if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
3286              uint8_t sensorModeFullFov =
3287                      meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
3288              LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
3289              if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
3290                      sensorModeFullFov)) {
3291                  rc = BAD_VALUE;
3292              }
3293          }
3294          //TODO: validate the arguments, HSV scenemode should have only the
3295          //advertised fps ranges
3296  
3297          /*set the capture intent, hal version, tintless, stream info,
3298           *and disenable parameters to the backend*/
3299          LOGD("set_parms META_STREAM_INFO " );
3300          for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3301              LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
3302                      ", Format:%d is_type: %d",
3303                      mStreamConfigInfo.type[i],
3304                      mStreamConfigInfo.stream_sizes[i].width,
3305                      mStreamConfigInfo.stream_sizes[i].height,
3306                      mStreamConfigInfo.postprocess_mask[i],
3307                      mStreamConfigInfo.format[i],
3308                      mStreamConfigInfo.is_type[i]);
3309          }
3310  
3311          rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3312                      mParameters);
3313          if (rc < 0) {
3314              LOGE("set_parms failed for hal version, stream info");
3315          }
3316  
3317      }
3318  
3319      pthread_mutex_unlock(&mMutex);
3320  
3321      return rc;
3322  }
3323  
3324  /*===========================================================================
3325   * FUNCTION   : isEISEnabled
3326   *
3327   * DESCRIPTION: Decide whether EIS should get enabled or not.
3328   *
3329   * PARAMETERS :
3330   *   @meta : request from framework to process
3331   *
3332   * RETURN     : true/false Whether EIS should be enabled
3333   *
3334   *==========================================================================*/
isEISEnabled(const CameraMetadata & meta)3335  bool QCamera3HardwareInterface::isEISEnabled(const CameraMetadata& meta) {
3336      uint8_t fwkVideoStabMode = 0;
3337      if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
3338          fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
3339      }
3340  
3341      // If EIS setprop is enabled then only turn it on for video/preview
3342      return  m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
3343          (m_ISTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
3344  }
3345  
3346  /*===========================================================================
3347   * FUNCTION   : validateCaptureRequest
3348   *
3349   * DESCRIPTION: validate a capture request from camera service
3350   *
3351   * PARAMETERS :
3352   *   @request : request from framework to process
3353   *
3354   * RETURN     :
3355   *
3356   *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)3357  int QCamera3HardwareInterface::validateCaptureRequest(
3358                      camera3_capture_request_t *request,
3359                      List<InternalRequest> &internallyRequestedStreams)
3360  {
3361      ssize_t idx = 0;
3362      const camera3_stream_buffer_t *b;
3363      CameraMetadata meta;
3364  
3365      /* Sanity check the request */
3366      if (request == NULL) {
3367          LOGE("NULL capture request");
3368          return BAD_VALUE;
3369      }
3370  
3371      if ((request->settings == NULL) && (mState == CONFIGURED)) {
3372          /*settings cannot be null for the first request*/
3373          return BAD_VALUE;
3374      }
3375  
3376      uint32_t frameNumber = request->frame_number;
3377      if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3378              && (internallyRequestedStreams.size() == 0)) {
3379          LOGE("Request %d: No output buffers provided!",
3380                  __FUNCTION__, frameNumber);
3381          return BAD_VALUE;
3382      }
3383      if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3384          LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3385                   request->num_output_buffers, MAX_NUM_STREAMS);
3386          return BAD_VALUE;
3387      }
3388      if (request->input_buffer != NULL) {
3389          b = request->input_buffer;
3390          if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3391              LOGE("Request %d: Buffer %ld: Status not OK!",
3392                       frameNumber, (long)idx);
3393              return BAD_VALUE;
3394          }
3395          if (b->release_fence != -1) {
3396              LOGE("Request %d: Buffer %ld: Has a release fence!",
3397                       frameNumber, (long)idx);
3398              return BAD_VALUE;
3399          }
3400          if (b->buffer == NULL) {
3401              LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3402                       frameNumber, (long)idx);
3403              return BAD_VALUE;
3404          }
3405      }
3406  
3407      // Validate all buffers
3408      b = request->output_buffers;
3409      if (b == NULL) {
3410         return BAD_VALUE;
3411      }
3412      while (idx < (ssize_t)request->num_output_buffers) {
3413          QCamera3ProcessingChannel *channel =
3414                  static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3415          if (channel == NULL) {
3416              LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3417                       frameNumber, (long)idx);
3418              return BAD_VALUE;
3419          }
3420          if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3421              LOGE("Request %d: Buffer %ld: Status not OK!",
3422                       frameNumber, (long)idx);
3423              return BAD_VALUE;
3424          }
3425          if (b->release_fence != -1) {
3426              LOGE("Request %d: Buffer %ld: Has a release fence!",
3427                       frameNumber, (long)idx);
3428              return BAD_VALUE;
3429          }
3430          if (b->buffer == NULL) {
3431              LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3432                       frameNumber, (long)idx);
3433              return BAD_VALUE;
3434          }
3435          if (*(b->buffer) == NULL) {
3436              LOGE("Request %d: Buffer %ld: NULL private handle!",
3437                       frameNumber, (long)idx);
3438              return BAD_VALUE;
3439          }
3440          idx++;
3441          b = request->output_buffers + idx;
3442      }
3443      return NO_ERROR;
3444  }
3445  
3446  /*===========================================================================
3447   * FUNCTION   : deriveMinFrameDuration
3448   *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3450   *              on currently configured streams.
3451   *
3452   * PARAMETERS : NONE
3453   *
3454   * RETURN     : NONE
3455   *
3456   *==========================================================================*/
deriveMinFrameDuration()3457  void QCamera3HardwareInterface::deriveMinFrameDuration()
3458  {
3459      int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3460      bool hasRaw = false;
3461  
3462      mMinRawFrameDuration = 0;
3463      mMinJpegFrameDuration = 0;
3464      mMinProcessedFrameDuration = 0;
3465  
3466      maxJpegDim = 0;
3467      maxProcessedDim = 0;
3468      maxRawDim = 0;
3469  
3470      // Figure out maximum jpeg, processed, and raw dimensions
3471      for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3472          it != mStreamInfo.end(); it++) {
3473  
3474          // Input stream doesn't have valid stream_type
3475          if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3476              continue;
3477  
3478          int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3479          if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3480              if (dimension > maxJpegDim)
3481                  maxJpegDim = dimension;
3482          } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3483                  (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3484                  (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3485              hasRaw = true;
3486              if (dimension > maxRawDim)
3487                  maxRawDim = dimension;
3488          } else {
3489              if (dimension > maxProcessedDim)
3490                  maxProcessedDim = dimension;
3491          }
3492      }
3493  
3494      size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3495              MAX_SIZES_CNT);
3496  
3497      //Assume all jpeg dimensions are in processed dimensions.
3498      if (maxJpegDim > maxProcessedDim)
3499          maxProcessedDim = maxJpegDim;
3500      //Find the smallest raw dimension that is greater or equal to jpeg dimension
3501      if (hasRaw && maxProcessedDim > maxRawDim) {
3502          maxRawDim = INT32_MAX;
3503  
3504          for (size_t i = 0; i < count; i++) {
3505              int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3506                      gCamCapability[mCameraId]->raw_dim[i].height;
3507              if (dimension >= maxProcessedDim && dimension < maxRawDim)
3508                  maxRawDim = dimension;
3509          }
3510      }
3511  
3512      //Find minimum durations for processed, jpeg, and raw
3513      for (size_t i = 0; i < count; i++) {
3514          if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3515                  gCamCapability[mCameraId]->raw_dim[i].height) {
3516              mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3517              break;
3518          }
3519      }
3520      count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3521      for (size_t i = 0; i < count; i++) {
3522          if (maxProcessedDim ==
3523                  gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3524                  gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3525              mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3526              mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3527              break;
3528          }
3529      }
3530  }
3531  
3532  /*===========================================================================
3533   * FUNCTION   : getMinFrameDuration
3534   *
 * DESCRIPTION: get minimum frame duration based on the previously derived
 *              minimum frame durations and the current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
3541   *
3542   *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)3543  int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3544  {
3545      bool hasJpegStream = false;
3546      bool hasRawStream = false;
3547      for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3548          const camera3_stream_t *stream = request->output_buffers[i].stream;
3549          if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3550              hasJpegStream = true;
3551          else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3552                  stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3553                  stream->format == HAL_PIXEL_FORMAT_RAW16)
3554              hasRawStream = true;
3555      }
3556  
3557      if (!hasJpegStream)
3558          return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3559      else
3560          return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3561  }
3562  
3563  /*===========================================================================
3564   * FUNCTION   : handleBuffersDuringFlushLock
3565   *
3566   * DESCRIPTION: Account for buffers returned from back-end during flush
3567   *              This function is executed while mMutex is held by the caller.
3568   *
3569   * PARAMETERS :
3570   *   @buffer: image buffer for the callback
3571   *
3572   * RETURN     :
3573   *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)3574  void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3575  {
3576      bool buffer_found = false;
3577      for (List<PendingBuffersInRequest>::iterator req =
3578              mPendingBuffersMap.mPendingBuffersInRequest.begin();
3579              req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3580          for (List<PendingBufferInfo>::iterator i =
3581                  req->mPendingBufferList.begin();
3582                  i != req->mPendingBufferList.end(); i++) {
3583              if (i->buffer == buffer->buffer) {
3584                  mPendingBuffersMap.numPendingBufsAtFlush--;
3585                  LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3586                      buffer->buffer, req->frame_number,
3587                      mPendingBuffersMap.numPendingBufsAtFlush);
3588                  buffer_found = true;
3589                  break;
3590              }
3591          }
3592          if (buffer_found) {
3593              break;
3594          }
3595      }
3596      if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3597          //signal the flush()
3598          LOGD("All buffers returned to HAL. Continue flush");
3599          pthread_cond_signal(&mBuffersCond);
3600      }
3601  }
3602  
3603  /*===========================================================================
3604   * FUNCTION   : handleBatchMetadata
3605   *
3606   * DESCRIPTION: Handles metadata buffer callback in batch mode
3607   *
3608   * PARAMETERS : @metadata_buf: metadata buffer
3609   *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3610   *                 the meta buf in this method
3611   *
3612   * RETURN     :
3613   *
3614   *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    // [first, last] frame-number ranges covered by this batch; "urgent"
    // tracks the partial (3A) results, the plain set tracks final results.
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;
    bool is_metabuf_queued = false;

    // Pointers into the metadata buffer; any of these may be NULL when the
    // corresponding entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Do not bail out: the loop below still runs once so that
        // handleMetadataWithLock can do pipeline-depth bookkeeping.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first one
        // recorded when the batch was submitted.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the final-result frame numbers; the batch entry
        // is consumed (removed) here because this is its last use.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; the larger of the two
        // ranges wins so both urgent and final results get emitted.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Patch the interpolated urgent frame number into the
                    // shared metadata buffer before each delivery.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamps: the batch carries only the last
                // frame's capture time, so earlier frames are back-computed
                // at the HFR video frame interval.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // mMutex must be held across each per-frame delivery; buf-done is
        // deferred (false) so the single metadata buffer can be reused for
        // every frame in the batch.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
                (i == frameNumDiff-1), /* last metadata in the batch metadata */
                &is_metabuf_queued /* if metabuf isqueued or not */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        metadata_buf = NULL;
    }
}
3783  
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)3784  void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3785          camera3_error_msg_code_t errorCode)
3786  {
3787      camera3_notify_msg_t notify_msg;
3788      memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3789      notify_msg.type = CAMERA3_MSG_ERROR;
3790      notify_msg.message.error.error_code = errorCode;
3791      notify_msg.message.error.error_stream = NULL;
3792      notify_msg.message.error.frame_number = frameNumber;
3793      orchestrateNotify(&notify_msg);
3794  
3795      return;
3796  }
3797  
3798  /*===========================================================================
3799   * FUNCTION   : sendPartialMetadataWithLock
3800   *
3801   * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3802   *
3803   * PARAMETERS : @metadata: metadata buffer
3804   *              @requestIter: The iterator for the pending capture request for
 *              which the partial result is being sent
3806   *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3807   *                  last urgent metadata in a batch. Always true for non-batch mode
3808   *              @isJumpstartMetadata: Whether this is a partial metadata for
3809   *              jumpstart, i.e. even though it doesn't map to a valid partial
3810   *              frame number, its metadata entries should be kept.
3811   *
3812   * RETURN     :
3813   *
3814   *==========================================================================*/
3815  
sendPartialMetadataWithLock(metadata_buffer_t * metadata,const pendingRequestIterator requestIter,bool lastUrgentMetadataInBatch,bool isJumpstartMetadata)3816  void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3817          metadata_buffer_t *metadata,
3818          const pendingRequestIterator requestIter,
3819          bool lastUrgentMetadataInBatch,
3820          bool isJumpstartMetadata)
3821  {
3822      camera3_capture_result_t result;
3823      memset(&result, 0, sizeof(camera3_capture_result_t));
3824  
3825      requestIter->partial_result_cnt++;
3826  
3827      // Extract 3A metadata
3828      result.result = translateCbUrgentMetadataToResultMetadata(
3829              metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3830              isJumpstartMetadata);
3831      // Populate metadata result
3832      result.frame_number = requestIter->frame_number;
3833      result.num_output_buffers = 0;
3834      result.output_buffers = NULL;
3835      result.partial_result = requestIter->partial_result_cnt;
3836  
3837      {
3838          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3839          if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3840              // Notify HDR+ client about the partial metadata.
3841              gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3842              result.partial_result == PARTIAL_RESULT_COUNT);
3843          }
3844      }
3845  
3846      orchestrateResult(&result);
3847      LOGD("urgent frame_number = %u", result.frame_number);
3848      free_camera_metadata((camera_metadata_t *)result.result);
3849  }
3850  
3851  /*===========================================================================
3852   * FUNCTION   : handleMetadataWithLock
3853   *
3854   * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3855   *
3856   * PARAMETERS : @metadata_buf: metadata buffer
3857   *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3858   *                 the meta buf in this method
3859   *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3860   *                  last urgent metadata in a batch. Always true for non-batch mode
3861   *              @lastMetadataInBatch: Boolean to indicate whether this is the
3862   *                  last metadata in a batch. Always true for non-batch mode
3863   *              @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3864   *                  buffer is enqueued or not.
3865   *
3866   * RETURN     :
3867   *
3868   *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool lastUrgentMetadataInBatch,bool lastMetadataInBatch,bool * p_is_metabuf_queued)3869  void QCamera3HardwareInterface::handleMetadataWithLock(
3870      mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
3871      bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3872      bool *p_is_metabuf_queued)
3873  {
3874      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
3875      if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3876          //during flush do not send metadata from this thread
3877          LOGD("not sending metadata during flush or when mState is error");
3878          if (free_and_bufdone_meta_buf) {
3879              mMetadataChannel->bufDone(metadata_buf);
3880              free(metadata_buf);
3881          }
3882          return;
3883      }
3884  
3885      //not in flush
3886      metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3887      int32_t frame_number_valid, urgent_frame_number_valid;
3888      uint32_t frame_number, urgent_frame_number;
3889      int64_t capture_time, capture_time_av;
3890      nsecs_t currentSysTime;
3891  
3892      int32_t *p_frame_number_valid =
3893              POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3894      uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3895      int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3896      int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3897      int32_t *p_urgent_frame_number_valid =
3898              POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3899      uint32_t *p_urgent_frame_number =
3900              POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3901      IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3902              metadata) {
3903          LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3904                   *p_frame_number_valid, *p_frame_number);
3905      }
3906  
3907      camera_metadata_t *resultMetadata = nullptr;
3908  
3909      if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3910              (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3911          LOGE("Invalid metadata");
3912          if (free_and_bufdone_meta_buf) {
3913              mMetadataChannel->bufDone(metadata_buf);
3914              free(metadata_buf);
3915          }
3916          goto done_metadata;
3917      }
3918      frame_number_valid =        *p_frame_number_valid;
3919      frame_number =              *p_frame_number;
3920      capture_time =              *p_capture_time;
3921      capture_time_av =           *p_capture_time_av;
3922      urgent_frame_number_valid = *p_urgent_frame_number_valid;
3923      urgent_frame_number =       *p_urgent_frame_number;
3924      currentSysTime =            systemTime(CLOCK_MONOTONIC);
3925  
3926      if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3927          const int tries = 3;
3928          nsecs_t bestGap, measured;
3929          for (int i = 0; i < tries; ++i) {
3930              const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3931              const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3932              const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3933              const nsecs_t gap = tmono2 - tmono;
3934              if (i == 0 || gap < bestGap) {
3935                  bestGap = gap;
3936                  measured = tbase - ((tmono + tmono2) >> 1);
3937              }
3938          }
3939          capture_time -= measured;
3940      }
3941  
3942      // Detect if buffers from any requests are overdue
3943      for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3944          int64_t timeout;
3945          {
3946              Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3947              // If there is a pending HDR+ request, the following requests may be blocked until the
3948              // HDR+ request is done. So allow a longer timeout.
3949              timeout = (mHdrPlusPendingRequests.size() > 0) ?
3950                      MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3951              timeout = s2ns(timeout);
3952              if (timeout < mExpectedInflightDuration) {
3953                  timeout = mExpectedInflightDuration;
3954              }
3955          }
3956  
3957          if ((currentSysTime - req.timestamp) > timeout) {
3958              for (auto &missed : req.mPendingBufferList) {
3959                  assert(missed.stream->priv);
3960                  if (missed.stream->priv) {
3961                      QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3962                      assert(ch->mStreams[0]);
3963                      if (ch->mStreams[0]) {
3964                          LOGE("Cancel missing frame = %d, buffer = %p,"
3965                              "stream type = %d, stream format = %d",
3966                              req.frame_number, missed.buffer,
3967                              ch->mStreams[0]->getMyType(), missed.stream->format);
3968                          ch->timeoutFrame(req.frame_number);
3969                      }
3970                  }
3971              }
3972          }
3973      }
3974      //For the very first metadata callback, regardless whether it contains valid
3975      //frame number, send the partial metadata for the jumpstarting requests.
3976      //Note that this has to be done even if the metadata doesn't contain valid
3977      //urgent frame number, because in the case only 1 request is ever submitted
3978      //to HAL, there won't be subsequent valid urgent frame number.
3979      if (mFirstMetadataCallback) {
3980          for (pendingRequestIterator i =
3981                  mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3982              if (i->bUseFirstPartial) {
3983                  sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3984                          true /*isJumpstartMetadata*/);
3985              }
3986          }
3987          mFirstMetadataCallback = false;
3988      }
3989  
3990      //Partial result on process_capture_result for timestamp
3991      if (urgent_frame_number_valid) {
3992          LOGD("valid urgent frame_number = %u", urgent_frame_number);
3993  
3994          //Received an urgent Frame Number, handle it
3995          //using partial results
3996          for (pendingRequestIterator i =
3997                  mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3998              LOGD("Iterator Frame = %d urgent frame = %d",
3999                   i->frame_number, urgent_frame_number);
4000  
4001              if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
4002                      (i->partial_result_cnt == 0)) {
4003                  LOGE("Error: HAL missed urgent metadata for frame number %d",
4004                           i->frame_number);
4005                  i->partialResultDropped = true;
4006                  i->partial_result_cnt++;
4007              }
4008  
4009              if (i->frame_number == urgent_frame_number &&
4010                       i->partial_result_cnt == 0) {
4011                  sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4012                          false /*isJumpstartMetadata*/);
4013                  if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
4014                      // Instant AEC settled for this frame.
4015                      LOGH("instant AEC settled for frame number %d", urgent_frame_number);
4016                      mInstantAECSettledFrameNumber = urgent_frame_number;
4017                  }
4018                  break;
4019              }
4020          }
4021      }
4022  
4023      if (!frame_number_valid) {
4024          LOGD("Not a valid normal frame number, used as SOF only");
4025          if (free_and_bufdone_meta_buf) {
4026              mMetadataChannel->bufDone(metadata_buf);
4027              free(metadata_buf);
4028          }
4029          goto done_metadata;
4030      }
4031      LOGH("valid frame_number = %u, capture_time = %lld",
4032              frame_number, capture_time);
4033  
4034      handleDepthDataLocked(metadata->depth_data, frame_number,
4035              metadata->is_depth_data_valid);
4036  
4037      // Check whether any stream buffer corresponding to this is dropped or not
4038      // If dropped, then send the ERROR_BUFFER for the corresponding stream
4039      // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
4040      for (auto & pendingRequest : mPendingRequestsList) {
4041          if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
4042                      mInstantAECSettledFrameNumber)) {
4043              camera3_notify_msg_t notify_msg = {};
4044              for (auto & buffer : pendingRequest.buffers) {
4045                  bool dropFrame = false;
4046                  QCamera3ProcessingChannel *channel =
4047                          (QCamera3ProcessingChannel *)buffer.stream->priv;
4048                  uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4049                  if (p_cam_frame_drop) {
4050                      for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
4051                          if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
4052                              // Got the stream ID for drop frame.
4053                              dropFrame = true;
4054                              break;
4055                          }
4056                      }
4057                  } else {
4058                      // This is instant AEC case.
4059                  // For instant AEC drop the stream until AEC is settled.
4060                      dropFrame = true;
4061                  }
4062  
4063                  if (dropFrame) {
4064                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
4065                      if (p_cam_frame_drop) {
4066                          // Treat msg as error for system buffer drops
4067                          LOGE("Start of reporting error frame#=%u, streamID=%u",
4068                                   pendingRequest.frame_number, streamID);
4069                      } else {
4070                          // For instant AEC, inform frame drop and frame number
4071                          LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
4072                                  "AEC settled frame number = %u",
4073                                  pendingRequest.frame_number, streamID,
4074                                  mInstantAECSettledFrameNumber);
4075                      }
4076                      notify_msg.type = CAMERA3_MSG_ERROR;
4077                      notify_msg.message.error.frame_number = pendingRequest.frame_number;
4078                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
4079                      notify_msg.message.error.error_stream = buffer.stream;
4080                      orchestrateNotify(&notify_msg);
4081                      if (p_cam_frame_drop) {
4082                          // Treat msg as error for system buffer drops
4083                          LOGE("End of reporting error frame#=%u, streamID=%u",
4084                                  pendingRequest.frame_number, streamID);
4085                      } else {
4086                          // For instant AEC, inform frame drop and frame number
4087                          LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
4088                                  "AEC settled frame number = %u",
4089                                  pendingRequest.frame_number, streamID,
4090                                  mInstantAECSettledFrameNumber);
4091                      }
4092                      PendingFrameDropInfo PendingFrameDrop;
4093                      PendingFrameDrop.frame_number = pendingRequest.frame_number;
4094                      PendingFrameDrop.stream_ID = streamID;
4095                      // Add the Frame drop info to mPendingFrameDropList
4096                      mPendingFrameDropList.push_back(PendingFrameDrop);
4097                  }
4098              }
4099          }
4100      }
4101  
4102      for (auto & pendingRequest : mPendingRequestsList) {
4103          // Find the pending request with the frame number.
4104          if (pendingRequest.frame_number < frame_number) {
4105              // Workaround for case where shutter is missing due to dropped
4106              // metadata
4107              if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
4108                  mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
4109              }
4110          } else if (pendingRequest.frame_number == frame_number) {
4111              // Update the sensor timestamp.
4112              pendingRequest.timestamp = capture_time;
4113  
4114  
4115              /* Set the timestamp in display metadata so that clients aware of
4116                 private_handle such as VT can use this un-modified timestamps.
4117                 Camera framework is unaware of this timestamp and cannot change this */
4118              updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
4119  
4120              // Find channel requiring metadata, meaning internal offline postprocess
4121              // is needed.
4122              //TODO: for now, we don't support two streams requiring metadata at the same time.
4123              // (because we are not making copies, and metadata buffer is not reference counted.
4124              bool internalPproc = false;
4125              for (pendingBufferIterator iter = pendingRequest.buffers.begin();
4126                      iter != pendingRequest.buffers.end(); iter++) {
4127                  if (iter->need_metadata) {
4128                      internalPproc = true;
4129                      QCamera3ProcessingChannel *channel =
4130                              (QCamera3ProcessingChannel *)iter->stream->priv;
4131                      channel->queueReprocMetadata(metadata_buf);
4132                      if(p_is_metabuf_queued != NULL) {
4133                          *p_is_metabuf_queued = true;
4134                      }
4135                      iter->need_metadata = false;
4136                      break;
4137                  }
4138              }
4139              for (auto itr = pendingRequest.internalRequestList.begin();
4140                    itr != pendingRequest.internalRequestList.end(); itr++) {
4141                  if (itr->need_metadata) {
4142                      internalPproc = true;
4143                      QCamera3ProcessingChannel *channel =
4144                              (QCamera3ProcessingChannel *)itr->stream->priv;
4145                      channel->queueReprocMetadata(metadata_buf);
4146                      break;
4147                  }
4148              }
4149  
4150              saveExifParams(metadata);
4151  
4152              bool *enableZsl = nullptr;
4153              if (gExposeEnableZslKey) {
4154                  enableZsl = &pendingRequest.enableZsl;
4155              }
4156  
4157              resultMetadata = translateFromHalMetadata(metadata,
4158                      pendingRequest, internalPproc,
4159                      lastMetadataInBatch, enableZsl);
4160  
4161              updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
4162  
4163              if (pendingRequest.blob_request) {
4164                  //Dump tuning metadata if enabled and available
4165                  char prop[PROPERTY_VALUE_MAX];
4166                  memset(prop, 0, sizeof(prop));
4167                  property_get("persist.camera.dumpmetadata", prop, "0");
4168                  int32_t enabled = atoi(prop);
4169                  if (enabled && metadata->is_tuning_params_valid) {
4170                      dumpMetadataToFile(metadata->tuning_params,
4171                             mMetaFrameCount,
4172                             enabled,
4173                             "Snapshot",
4174                             frame_number);
4175                  }
4176              }
4177  
4178              if (!internalPproc) {
4179                  LOGD("couldn't find need_metadata for this metadata");
4180                  // Return metadata buffer
4181                  if (free_and_bufdone_meta_buf) {
4182                      mMetadataChannel->bufDone(metadata_buf);
4183                      free(metadata_buf);
4184                  }
4185              }
4186  
4187              break;
4188          }
4189      }
4190  
4191      mShutterDispatcher.markShutterReady(frame_number, capture_time);
4192  
4193      // Try to send out capture result metadata.
4194      handlePendingResultMetadataWithLock(frame_number,  resultMetadata);
4195      return;
4196  
4197  done_metadata:
4198      for (pendingRequestIterator i = mPendingRequestsList.begin();
4199              i != mPendingRequestsList.end() ;i++) {
4200          i->pipeline_depth++;
4201      }
4202      LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4203      unblockRequestIfNecessary();
4204  }
4205  
4206  /*===========================================================================
4207   * FUNCTION   : handleDepthDataLocked
4208   *
4209   * DESCRIPTION: Handles incoming depth data
4210   *
4211   * PARAMETERS : @depthData  : Depth data
4212   *              @frameNumber: Frame number of the incoming depth data
4213   *              @valid      : Valid flag for the incoming data
4214   *
4215   * RETURN     :
4216   *
4217   *==========================================================================*/
void QCamera3HardwareInterface::handleDepthDataLocked(
        const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
    // Drains the depth channel for every frame up to and including frameNumber:
    // frames older than frameNumber missed their depth data and are returned
    // with ERROR status (plus an ERROR_BUFFER notify), while the frame matching
    // frameNumber is populated with depthData when 'valid' is set.
    // NOTE(review): the "Locked" suffix suggests mMutex is held by the caller —
    // confirm against call sites.
    uint32_t currentFrameNumber;    // out-param filled by getOldestFrame() before each use
    buffer_handle_t *depthBuffer;

    if (nullptr == mDepthChannel) {
        // No depth stream configured for this session; nothing to dispatch.
        return;
    }

    // Template result buffer reused for each dispatched frame; .buffer and
    // .status are overwritten per iteration.
    camera3_stream_buffer_t resultBuffer =
        {.acquire_fence = -1,
         .release_fence = -1,
         .status = CAMERA3_BUFFER_STATUS_OK,
         .buffer = nullptr,
         .stream = mDepthChannel->getStream()};
    do {
        depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
        if (nullptr == depthBuffer) {
            // No more pending depth buffers in the channel.
            break;
        }

        resultBuffer.buffer = depthBuffer;
        if (currentFrameNumber == frameNumber) {
            if (valid) {
                int32_t rc = mDepthChannel->populateDepthData(depthData,
                        frameNumber);
                if (NO_ERROR != rc) {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                } else {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
                }
            } else {
                // Metadata flagged the incoming depth data as invalid.
                resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            }
        } else if (currentFrameNumber > frameNumber) {
            // Oldest pending buffer is newer than this callback's frame; keep
            // it queued for a later depth-data callback.
            break;
        } else {
            // An older frame never received depth data; notify ERROR_BUFFER
            // and return its buffer in error state so it is not leaked.
            camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
                    {{currentFrameNumber, mDepthChannel->getStream(),
                            CAMERA3_MSG_ERROR_BUFFER}}};
            orchestrateNotify(&notify_msg);

            LOGE("Depth buffer for frame number: %d is missing "
                    "returning back!", currentFrameNumber);
            resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
        }
        // Unmap before handing the buffer back to the framework dispatcher.
        mDepthChannel->unmapBuffer(currentFrameNumber);
        mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
    } while (currentFrameNumber < frameNumber);
}
4268  
4269  /*===========================================================================
4270   * FUNCTION   : notifyErrorFoPendingDepthData
4271   *
4272   * DESCRIPTION: Returns error for any pending depth buffers
4273   *
4274   * PARAMETERS : depthCh - depth channel that needs to get flushed
4275   *
4276   * RETURN     :
4277   *
4278   *==========================================================================*/
notifyErrorFoPendingDepthData(QCamera3DepthChannel * depthCh)4279  void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4280          QCamera3DepthChannel *depthCh) {
4281      uint32_t currentFrameNumber;
4282      buffer_handle_t *depthBuffer;
4283  
4284      if (nullptr == depthCh) {
4285          return;
4286      }
4287  
4288      camera3_notify_msg_t notify_msg =
4289          {.type = CAMERA3_MSG_ERROR,
4290                  {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4291      camera3_stream_buffer_t resultBuffer =
4292          {.acquire_fence = -1,
4293           .release_fence = -1,
4294           .buffer = nullptr,
4295           .stream = depthCh->getStream(),
4296           .status = CAMERA3_BUFFER_STATUS_ERROR};
4297  
4298      while (nullptr !=
4299              (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4300          depthCh->unmapBuffer(currentFrameNumber);
4301  
4302          notify_msg.message.error.frame_number = currentFrameNumber;
4303          orchestrateNotify(&notify_msg);
4304  
4305          mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
4306      };
4307  }
4308  
4309  /*===========================================================================
4310   * FUNCTION   : hdrPlusPerfLock
4311   *
4312   * DESCRIPTION: perf lock for HDR+ using custom intent
4313   *
4314   * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4315   *
4316   * RETURN     : None
4317   *
4318   *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)4319  void QCamera3HardwareInterface::hdrPlusPerfLock(
4320          mm_camera_super_buf_t *metadata_buf)
4321  {
4322      if (NULL == metadata_buf) {
4323          LOGE("metadata_buf is NULL");
4324          return;
4325      }
4326      metadata_buffer_t *metadata =
4327              (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4328      int32_t *p_frame_number_valid =
4329              POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4330      uint32_t *p_frame_number =
4331              POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4332  
4333      if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4334          LOGE("%s: Invalid metadata", __func__);
4335          return;
4336      }
4337  
4338      //acquire perf lock for 2 secs after the last HDR frame is captured
4339      constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
4340      if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4341          if ((p_frame_number != NULL) &&
4342                  (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4343              mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4344          }
4345      }
4346  }
4347  
4348  /*===========================================================================
4349   * FUNCTION   : handleInputBufferWithLock
4350   *
4351   * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4352   *
4353   * PARAMETERS : @frame_number: frame number of the input buffer
4354   *
4355   * RETURN     :
4356   *
4357   *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)4358  void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4359  {
4360      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
4361      pendingRequestIterator i = mPendingRequestsList.begin();
4362      while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4363          i++;
4364      }
4365      if (i != mPendingRequestsList.end() && i->input_buffer) {
4366          //found the right request
4367          CameraMetadata settings;
4368          nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4369          if(i->settings) {
4370              settings = i->settings;
4371              if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4372                  capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
4373              } else {
4374                  LOGE("No timestamp in input settings! Using current one.");
4375              }
4376          } else {
4377              LOGE("Input settings missing!");
4378          }
4379  
4380          mShutterDispatcher.markShutterReady(frame_number, capture_time);
4381          LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4382                      i->frame_number, capture_time);
4383  
4384          camera3_capture_result result;
4385          memset(&result, 0, sizeof(camera3_capture_result));
4386          result.frame_number = frame_number;
4387          result.result = i->settings;
4388          result.input_buffer = i->input_buffer;
4389          result.partial_result = PARTIAL_RESULT_COUNT;
4390  
4391          orchestrateResult(&result);
4392          LOGD("Input request metadata and input buffer frame_number = %u",
4393                          i->frame_number);
4394          i = erasePendingRequest(i);
4395  
4396          // Dispatch result metadata that may be just unblocked by this reprocess result.
4397          dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
4398      } else {
4399          LOGE("Could not find input request for frame number %d", frame_number);
4400      }
4401  }
4402  
4403  /*===========================================================================
4404   * FUNCTION   : handleBufferWithLock
4405   *
4406   * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4407   *
4408   * PARAMETERS : @buffer: image buffer for the callback
4409   *              @frame_number: frame number of the image buffer
4410   *
4411   * RETURN     :
4412   *
4413   *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)4414  void QCamera3HardwareInterface::handleBufferWithLock(
4415      camera3_stream_buffer_t *buffer, uint32_t frame_number)
4416  {
4417      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
4418  
4419      if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4420          mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4421      }
4422  
4423      /* Nothing to be done during error state */
4424      if ((ERROR == mState) || (DEINIT == mState)) {
4425          return;
4426      }
4427      if (mFlushPerf) {
4428          handleBuffersDuringFlushLock(buffer);
4429          return;
4430      }
4431      //not in flush
4432      // If the frame number doesn't exist in the pending request list,
4433      // directly send the buffer to the frameworks, and update pending buffers map
4434      // Otherwise, book-keep the buffer.
4435      pendingRequestIterator i = mPendingRequestsList.begin();
4436      while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4437          i++;
4438      }
4439  
4440      if (i != mPendingRequestsList.end()) {
4441          if (i->input_buffer) {
4442              // For a reprocessing request, try to send out result metadata.
4443              handlePendingResultMetadataWithLock(frame_number, nullptr);
4444          }
4445      }
4446  
4447      // Check if this frame was dropped.
4448      for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4449              m != mPendingFrameDropList.end(); m++) {
4450          QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4451          uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4452          if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4453              buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4454              LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4455                       frame_number, streamID);
4456              m = mPendingFrameDropList.erase(m);
4457              break;
4458          }
4459      }
4460  
4461      // WAR for encoder avtimer timestamp issue
4462      QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4463      if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4464          m_bAVTimerEnabled) {
4465          for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4466              req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4467              if (req->frame_number != frame_number)
4468                  continue;
4469              if(req->av_timestamp == 0) {
4470                  buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4471              }
4472              else {
4473                  struct private_handle_t *priv_handle =
4474                      (struct private_handle_t *) (*(buffer->buffer));
4475                  setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4476              }
4477          }
4478      }
4479  
4480      buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4481      LOGH("result frame_number = %d, buffer = %p",
4482               frame_number, buffer->buffer);
4483  
4484      mPendingBuffersMap.removeBuf(buffer->buffer);
4485      mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4486  
4487      if (mPreviewStarted == false) {
4488          QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4489          if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
4490              logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4491  
4492              mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4493              mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4494              mPreviewStarted = true;
4495  
4496              // Set power hint for preview
4497              mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4498          }
4499      }
4500  }
4501  
removeUnrequestedMetadata(pendingRequestIterator requestIter,camera_metadata_t * resultMetadata)4502  void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4503          camera_metadata_t *resultMetadata) {
4504      CameraMetadata metadata;
4505      metadata.acquire(resultMetadata);
4506  
4507      // Remove len shading map if it's not requested.
4508      if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4509              metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4510              metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
4511              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4512          metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4513          metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4514              &requestIter->requestedLensShadingMapMode, 1);
4515      }
4516  
4517      // Remove face information if it's not requested.
4518      if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4519              metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4520              metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4521              ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4522          metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4523          metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4524                  &requestIter->requestedFaceDetectMode, 1);
4525      }
4526  
4527      requestIter->resultMetadata = metadata.release();
4528  }
4529  
void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
        camera_metadata_t *resultMetadata)
{
    // Attaches the final result metadata to the pending request for this frame
    // number, updates partial-result accounting per request type, and then
    // attempts an in-order dispatch of all completed requests.
    // Called with mMutex held (per the "WithLock" naming convention).

    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    bool liveRequest = false;
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestIter->input_buffer != nullptr) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        liveRequest = true;
        // A live request should have seen its urgent (partial) metadata by
        // now; if not, record the drop so dispatch can emit ERROR_RESULT.
        if ((requestIter->partial_result_cnt == 0) && !requestIter->partialResultDropped) {
            LOGE("Urgent metadata for frame number: %d didn't arrive!", frameNumber);
            requestIter->partialResultDropped = true;
        }
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
        mPendingLiveRequest--;

        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            // For a live request, send the metadata to HDR+ client.
            if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                    requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
            }
        }
    }

    // Reprocessing results reuse the request settings verbatim, so only trim
    // unrequested tags (shading map, face data) for non-reprocess requests.
    if (requestIter->input_buffer == nullptr) {
        removeUnrequestedMetadata(requestIter, resultMetadata);
    }

    dispatchResultMetadataWithLock(frameNumber, liveRequest);
}
4582  
void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
        bool isLiveRequest) {
    // Walks the pending request list (ordered by frame number) and sends out,
    // in order, every capture result whose metadata is ready; also reports
    // ERROR_RESULT for older live requests whose metadata will never arrive.
    // Called with mMutex held (per the "WithLock" naming convention).

    // The pending requests are ordered by increasing frame numbers. The result metadata are ready
    // to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out result metadata that are ready. Also if
    // this result metadata belongs to a live request, notify errors for previous live requests
    // that don't have result metadata yet.
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        if (readyToSend && iter->resultMetadata == nullptr) {
            readyToSend = false;
        }

        bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
        bool errorResult = false;

        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send out shutter callback
        // and result metadata.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }
            // Notify ERROR_RESULT if partial result was dropped.
            errorResult = iter->partialResultDropped;
        } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
            errorResult = true;
        } else {
            // Not ready and not an earlier missed live request: leave in place.
            iter++;
            continue;
        }

        if (errorResult) {
            // Check for any buffers that might be stuck in the post-process input queue
            // awaiting metadata and queue an empty meta buffer. The invalid data should
            // fail the offline post-process pass and return any buffers that otherwise
            // will become lost.
            for (auto it = iter->buffers.begin(); it != iter->buffers.end(); it++) {
                if (it->need_metadata) {
                    QCamera3ProcessingChannel *channel =
                        reinterpret_cast<QCamera3ProcessingChannel *> (it->stream->priv);
                    if (channel != nullptr) {
                        LOGE("Dropped result: %d Unblocking any pending pp buffers!",
                                iter->frame_number);
                        channel->queueReprocMetadata(nullptr);
                    }
                    it->need_metadata = false;
                    break;
                }
            }

            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            result.output_buffers = nullptr;
            result.num_output_buffers = 0;
            orchestrateResult(&result);
        }
        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        // erasePendingRequest returns the next valid iterator, keeping the
        // erase-while-iterating pattern safe.
        iter = erasePendingRequest(iter);
    }

    if (isLiveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    unblockRequestIfNecessary();
}
4677  
4678  /*===========================================================================
4679   * FUNCTION   : unblockRequestIfNecessary
4680   *
4681   * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4682   *              that mMutex is held when this function is called.
4683   *
4684   * PARAMETERS :
4685   *
4686   * RETURN     :
4687   *
4688   *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request: wake any thread waiting on mRequestCond
   // so it can re-evaluate its wait condition. Per the function header above,
   // mMutex is already held by the caller when this is invoked.
   pthread_cond_signal(&mRequestCond);
}
4694  
4695  /*===========================================================================
4696   * FUNCTION   : isHdrSnapshotRequest
4697   *
4698   * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4699   *
4700   * PARAMETERS : camera3 request structure
4701   *
4702   * RETURN     : boolean decision variable
4703   *
4704   *==========================================================================*/
isHdrSnapshotRequest(camera3_capture_request * request)4705  bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4706  {
4707      if (request == NULL) {
4708          LOGE("Invalid request handle");
4709          assert(0);
4710          return false;
4711      }
4712  
4713      if (!mForceHdrSnapshot) {
4714          CameraMetadata frame_settings;
4715          frame_settings = request->settings;
4716  
4717          if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4718              uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4719              if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4720                  return false;
4721              }
4722          } else {
4723              return false;
4724          }
4725  
4726          if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4727              uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4728              if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4729                  return false;
4730              }
4731          } else {
4732              return false;
4733          }
4734      }
4735  
4736      for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4737          if (request->output_buffers[i].stream->format
4738                  == HAL_PIXEL_FORMAT_BLOB) {
4739              return true;
4740          }
4741      }
4742  
4743      return false;
4744  }
4745  /*===========================================================================
4746   * FUNCTION   : orchestrateRequest
4747   *
4748   * DESCRIPTION: Orchestrates a capture request from camera service
4749   *
4750   * PARAMETERS :
4751   *   @request : request from framework to process
4752   *
4753   * RETURN     : Error status codes
4754   *
4755   *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // The HDR-snapshot path below rewrites frame_number, num_output_buffers
    // and settings of the framework's request struct in place, so the
    // originals are saved here and restored before returning.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the one framework request into a bracketed
        // sequence of internal captures (settling/-2x, 0x, 2x per the
        // comments below). Only the capture registered via
        // allocStoreInternalFrameNumber() maps back to the framework frame
        // number; all generateStoreInternalFrameNumber() captures map to
        // EMPTY_FRAMEWORK_FRAME_NUMBER and their results/notifies are dropped
        // by orchestrateResult()/orchestrateNotify().
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        // Framework buffers are withheld from the internal captures.
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // NOTE(review): CameraMetadata::operator=(const camera_metadata_t*)
        // clones the buffer, and release() hands out an owned clone. Each
        // "modified_meta = modified_settings; ... release()" round below
        // appears to orphan the previous modified_settings buffer, and the
        // final one is dropped when original_settings is restored — confirm
        // whether processCaptureRequest() takes ownership before changing.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // This is the one capture tied back to the framework's frame number:
        // it carries the original output buffers.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Reset exposure compensation to 0 (AE still locked) for the 0x pair.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settling capture...
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...then the actual 0x capture with metadata requested.
        // NOTE(review): mixes LOGE above with ALOGE here — harmless, but
        // inconsistent logging.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Same settling-then-capture pattern for the 2x frame.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal (non-HDR, or reprocess) request: translate to a fresh
        // internal frame number and forward as-is.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
4891  
4892  /*===========================================================================
4893   * FUNCTION   : orchestrateResult
4894   *
4895   * DESCRIPTION: Orchestrates a capture result to camera service
4896   *
4897   * PARAMETERS :
4898   *   @request : request from framework to process
4899   *
4900   * RETURN     :
4901   *
4902   *==========================================================================*/
orchestrateResult(camera3_capture_result_t * result)4903  void QCamera3HardwareInterface::orchestrateResult(
4904                      camera3_capture_result_t *result)
4905  {
4906      uint32_t frameworkFrameNumber;
4907      int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4908              frameworkFrameNumber);
4909      if (rc != NO_ERROR) {
4910          LOGE("Cannot find translated frameworkFrameNumber");
4911          assert(0);
4912      } else {
4913          if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4914              LOGD("Internal Request drop the result");
4915          } else {
4916              if (result->result != NULL) {
4917                  camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4918                  camera_metadata_entry_t entry;
4919                  int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4920                  if (ret == OK) {
4921                      int64_t sync_frame_number = frameworkFrameNumber;
4922                      ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4923                      if (ret != OK)
4924                          LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
4925                  }
4926              }
4927              result->frame_number = frameworkFrameNumber;
4928              mCallbackOps->process_capture_result(mCallbackOps, result);
4929          }
4930      }
4931  }
4932  
4933  /*===========================================================================
4934   * FUNCTION   : orchestrateNotify
4935   *
4936   * DESCRIPTION: Orchestrates a notify to camera service
4937   *
4938   * PARAMETERS :
4939   *   @request : request from framework to process
4940   *
4941   * RETURN     :
4942   *
4943   *==========================================================================*/
orchestrateNotify(camera3_notify_msg_t * notify_msg)4944  void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4945  {
4946      uint32_t frameworkFrameNumber;
4947      uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
4948      int32_t rc = NO_ERROR;
4949  
4950      rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
4951                                                            frameworkFrameNumber);
4952  
4953      if (rc != NO_ERROR) {
4954          if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4955              LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4956              frameworkFrameNumber = 0;
4957          } else {
4958              LOGE("Cannot find translated frameworkFrameNumber");
4959              assert(0);
4960              return;
4961          }
4962      }
4963  
4964      if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4965          LOGD("Internal Request drop the notifyCb");
4966      } else {
4967          notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4968          mCallbackOps->notify(mCallbackOps, notify_msg);
4969      }
4970  }
4971  
4972  /*===========================================================================
4973   * FUNCTION   : FrameNumberRegistry
4974   *
4975   * DESCRIPTION: Constructor
4976   *
4977   * PARAMETERS :
4978   *
4979   * RETURN     :
4980   *
4981   *==========================================================================*/
FrameNumberRegistry()4982  FrameNumberRegistry::FrameNumberRegistry()
4983  {
4984      _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4985  }
4986  
4987  /*===========================================================================
4988   * FUNCTION   : ~FrameNumberRegistry
4989   *
4990   * DESCRIPTION: Destructor
4991   *
4992   * PARAMETERS :
4993   *
4994   * RETURN     :
4995   *
4996   *==========================================================================*/
~FrameNumberRegistry()4997  FrameNumberRegistry::~FrameNumberRegistry()
4998  {
4999  }
5000  
5001  /*===========================================================================
5002   * FUNCTION   : PurgeOldEntriesLocked
5003   *
5004   * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
5005   *
5006   * PARAMETERS :
5007   *
5008   * RETURN     : NONE
5009   *
5010   *==========================================================================*/
purgeOldEntriesLocked()5011  void FrameNumberRegistry::purgeOldEntriesLocked()
5012  {
5013      while (_register.begin() != _register.end()) {
5014          auto itr = _register.begin();
5015          if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
5016              _register.erase(itr);
5017          } else {
5018              return;
5019          }
5020      }
5021  }
5022  
5023  /*===========================================================================
5024   * FUNCTION   : allocStoreInternalFrameNumber
5025   *
5026   * DESCRIPTION: Method to note down a framework request and associate a new
5027   *              internal request number against it
5028   *
5029   * PARAMETERS :
5030   *   @fFrameNumber: Identifier given by framework
5031   *   @internalFN  : Output parameter which will have the newly generated internal
5032   *                  entry
5033   *
5034   * RETURN     : Error code
5035   *
5036   *==========================================================================*/
allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,uint32_t & internalFrameNumber)5037  int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
5038                                                              uint32_t &internalFrameNumber)
5039  {
5040      Mutex::Autolock lock(mRegistryLock);
5041      internalFrameNumber = _nextFreeInternalNumber++;
5042      LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
5043      _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
5044      purgeOldEntriesLocked();
5045      return NO_ERROR;
5046  }
5047  
5048  /*===========================================================================
5049   * FUNCTION   : generateStoreInternalFrameNumber
5050   *
5051   * DESCRIPTION: Method to associate a new internal request number independent
5052   *              of any associate with framework requests
5053   *
5054   * PARAMETERS :
5055   *   @internalFrame#: Output parameter which will have the newly generated internal
5056   *
5057   *
5058   * RETURN     : Error code
5059   *
5060   *==========================================================================*/
generateStoreInternalFrameNumber(uint32_t & internalFrameNumber)5061  int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
5062  {
5063      Mutex::Autolock lock(mRegistryLock);
5064      internalFrameNumber = _nextFreeInternalNumber++;
5065      LOGD("Generated internal framenumber:%d", internalFrameNumber);
5066      _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
5067      purgeOldEntriesLocked();
5068      return NO_ERROR;
5069  }
5070  
5071  /*===========================================================================
5072   * FUNCTION   : getFrameworkFrameNumber
5073   *
5074   * DESCRIPTION: Method to query the framework framenumber given an internal #
5075   *
5076   * PARAMETERS :
5077   *   @internalFrame#: Internal reference
5078   *   @frameworkframenumber: Output parameter holding framework frame entry
5079   *
5080   * RETURN     : Error code
5081   *
5082   *==========================================================================*/
getFrameworkFrameNumber(uint32_t internalFrameNumber,uint32_t & frameworkFrameNumber)5083  int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
5084                                                       uint32_t &frameworkFrameNumber)
5085  {
5086      Mutex::Autolock lock(mRegistryLock);
5087      auto itr = _register.find(internalFrameNumber);
5088      if (itr == _register.end()) {
5089          LOGE("Cannot find internal#: %d", internalFrameNumber);
5090          return -ENOENT;
5091      }
5092  
5093      frameworkFrameNumber = itr->second;
5094      purgeOldEntriesLocked();
5095      return NO_ERROR;
5096  }
5097  
fillPbStreamConfig(pbcamera::StreamConfiguration * config,uint32_t pbStreamId,QCamera3Channel * channel,uint32_t streamIndex)5098  status_t QCamera3HardwareInterface::fillPbStreamConfig(
5099          pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
5100          uint32_t streamIndex) {
5101      if (config == nullptr) {
5102          LOGE("%s: config is null", __FUNCTION__);
5103          return BAD_VALUE;
5104      }
5105  
5106      if (channel == nullptr) {
5107          LOGE("%s: channel is null", __FUNCTION__);
5108          return BAD_VALUE;
5109      }
5110  
5111      QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
5112      if (stream == nullptr) {
5113          LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
5114          return NAME_NOT_FOUND;
5115      }
5116  
5117      const cam_stream_info_t* streamInfo = stream->getStreamInfo();
5118      if (streamInfo == nullptr) {
5119          LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
5120          return NAME_NOT_FOUND;
5121      }
5122  
5123      config->id = pbStreamId;
5124      config->image.width = streamInfo->dim.width;
5125      config->image.height = streamInfo->dim.height;
5126      config->image.padding = 0;
5127  
5128      int bytesPerPixel = 0;
5129  
5130      switch (streamInfo->fmt) {
5131          case CAM_FORMAT_YUV_420_NV21:
5132              config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5133              bytesPerPixel = 1;
5134              break;
5135          case CAM_FORMAT_YUV_420_NV12:
5136          case CAM_FORMAT_YUV_420_NV12_VENUS:
5137              config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5138              bytesPerPixel = 1;
5139              break;
5140          default:
5141              ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
5142              return BAD_VALUE;
5143      }
5144  
5145      uint32_t totalPlaneSize = 0;
5146  
5147      // Fill plane information.
5148      for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
5149          pbcamera::PlaneConfiguration plane;
5150          plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
5151          plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
5152          config->image.planes.push_back(plane);
5153  
5154          totalPlaneSize += (plane.stride * plane.scanline);
5155      }
5156  
5157      config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
5158      return OK;
5159  }
5160  
5161  /*===========================================================================
5162   * FUNCTION   : processCaptureRequest
5163   *
5164   * DESCRIPTION: process a capture request from camera service
5165   *
5166   * PARAMETERS :
5167   *   @request : request from framework to process
5168   *
5169   * RETURN     :
5170   *
5171   *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)5172  int QCamera3HardwareInterface::processCaptureRequest(
5173                      camera3_capture_request_t *request,
5174                      List<InternalRequest> &internallyRequestedStreams)
5175  {
5176      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
5177      int rc = NO_ERROR;
5178      int32_t request_id;
5179      CameraMetadata meta;
5180      bool isVidBufRequested = false;
5181      camera3_stream_buffer_t *pInputBuffer = NULL;
5182  
5183      pthread_mutex_lock(&mMutex);
5184  
5185      // Validate current state
5186      switch (mState) {
5187          case CONFIGURED:
5188          case STARTED:
5189              /* valid state */
5190              break;
5191  
5192          case ERROR:
5193              pthread_mutex_unlock(&mMutex);
5194              handleCameraDeviceError();
5195              return -ENODEV;
5196  
5197          default:
5198              LOGE("Invalid state %d", mState);
5199              pthread_mutex_unlock(&mMutex);
5200              return -ENODEV;
5201      }
5202  
5203      rc = validateCaptureRequest(request, internallyRequestedStreams);
5204      if (rc != NO_ERROR) {
5205          LOGE("incoming request is not valid");
5206          pthread_mutex_unlock(&mMutex);
5207          return rc;
5208      }
5209  
5210      meta = request->settings;
5211  
5212      if (mState == CONFIGURED) {
5213          logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
5214  
5215          // For HFR first capture request, send capture intent, and
5216          // stream on all streams
5217          if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) && mBatchSize) {
5218              int32_t hal_version = CAM_HAL_V3;
5219              uint8_t captureIntent = meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5220              clear_metadata_buffer(mParameters);
5221              ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
5222              ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
5223              rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
5224              if (rc < 0) {
5225                  LOGE("set_parms for for capture intent failed");
5226                  pthread_mutex_unlock(&mMutex);
5227                  return rc;
5228              }
5229          }
5230  
5231          uint8_t nrMode = 0;
5232          if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5233              nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5234          }
5235  
5236          cam_is_type_t is_type = IS_TYPE_NONE;
5237          bool setEis = isEISEnabled(meta);
5238          cam_sensor_mode_info_t sensorModeInfo = {};
5239          rc = getSensorModeInfo(sensorModeInfo);
5240          if (rc != NO_ERROR) {
5241              LOGE("Failed to get sensor output size");
5242              pthread_mutex_unlock(&mMutex);
5243              goto error_exit;
5244          }
5245  
5246          mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5247                  gCamCapability[mCameraId]->active_array_size.height,
5248                  sensorModeInfo.active_array_size.width,
5249                  sensorModeInfo.active_array_size.height);
5250  
5251          /* Set batchmode before initializing channel. Since registerBuffer
5252           * internally initializes some of the channels, better set batchmode
5253           * even before first register buffer */
5254          for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5255              it != mStreamInfo.end(); it++) {
5256              QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5257              if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5258                      && mBatchSize) {
5259                  rc = channel->setBatchSize(mBatchSize);
5260                  //Disable per frame map unmap for HFR/batchmode case
5261                  rc |= channel->setPerFrameMapUnmap(false);
5262                  if (NO_ERROR != rc) {
5263                      LOGE("Channel init failed %d", rc);
5264                      pthread_mutex_unlock(&mMutex);
5265                      goto error_exit;
5266                  }
5267              }
5268          }
5269  
5270          //First initialize all streams
5271          for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5272              it != mStreamInfo.end(); it++) {
5273              QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5274  
5275              /* Initial value of NR mode is needed before stream on */
5276              channel->setNRMode(nrMode);
5277              if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5278                 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
5279                 setEis) {
5280                  for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5281                      if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5282                          is_type = mStreamConfigInfo.is_type[i];
5283                          break;
5284                      }
5285                  }
5286                  rc = channel->initialize(is_type);
5287              } else {
5288                  rc = channel->initialize(IS_TYPE_NONE);
5289              }
5290              if (NO_ERROR != rc) {
5291                  LOGE("Channel initialization failed %d", rc);
5292                  pthread_mutex_unlock(&mMutex);
5293                  goto error_exit;
5294              }
5295          }
5296  
5297          if (mRawDumpChannel) {
5298              rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5299              if (rc != NO_ERROR) {
5300                  LOGE("Error: Raw Dump Channel init failed");
5301                  pthread_mutex_unlock(&mMutex);
5302                  goto error_exit;
5303              }
5304          }
5305          if (mHdrPlusRawSrcChannel) {
5306              rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5307              if (rc != NO_ERROR) {
5308                  LOGE("Error: HDR+ RAW Source Channel init failed");
5309                  pthread_mutex_unlock(&mMutex);
5310                  goto error_exit;
5311              }
5312          }
5313          if (mSupportChannel) {
5314              rc = mSupportChannel->initialize(IS_TYPE_NONE);
5315              if (rc < 0) {
5316                  LOGE("Support channel initialization failed");
5317                  pthread_mutex_unlock(&mMutex);
5318                  goto error_exit;
5319              }
5320          }
5321          if (mAnalysisChannel) {
5322              rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5323              if (rc < 0) {
5324                  LOGE("Analysis channel initialization failed");
5325                  pthread_mutex_unlock(&mMutex);
5326                  goto error_exit;
5327              }
5328          }
5329          if (mDummyBatchChannel) {
5330              rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5331              if (rc < 0) {
5332                  LOGE("mDummyBatchChannel setBatchSize failed");
5333                  pthread_mutex_unlock(&mMutex);
5334                  goto error_exit;
5335              }
5336              rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
5337              if (rc < 0) {
5338                  LOGE("mDummyBatchChannel initialization failed");
5339                  pthread_mutex_unlock(&mMutex);
5340                  goto error_exit;
5341              }
5342          }
5343  
5344          // Set bundle info
5345          rc = setBundleInfo();
5346          if (rc < 0) {
5347              LOGE("setBundleInfo failed %d", rc);
5348              pthread_mutex_unlock(&mMutex);
5349              goto error_exit;
5350          }
5351  
5352          //update settings from app here
5353          if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5354              mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5355              LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5356          }
5357          if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5358              mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5359              LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5360          }
5361          if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5362              mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5363              LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5364  
5365              if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5366                  (mLinkedCameraId != mCameraId) ) {
5367                  LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5368                      mLinkedCameraId, mCameraId);
5369                  pthread_mutex_unlock(&mMutex);
5370                  goto error_exit;
5371              }
5372          }
5373  
5374          // add bundle related cameras
5375          LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5376          if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5377              cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5378                      &m_pDualCamCmdPtr->bundle_info;
5379              m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
5380              if (mIsDeviceLinked)
5381                  m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5382              else
5383                  m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5384  
5385              pthread_mutex_lock(&gCamLock);
5386  
5387              if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5388                  LOGE("Dualcam: Invalid Session Id ");
5389                  pthread_mutex_unlock(&gCamLock);
5390                  pthread_mutex_unlock(&mMutex);
5391                  goto error_exit;
5392              }
5393  
5394              if (mIsMainCamera == 1) {
5395                  m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5396                  m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
5397                  m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5398                  m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
5399                  // related session id should be session id of linked session
5400                  m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5401              } else {
5402                  m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5403                  m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
5404                  m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5405                  m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
5406                  m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5407              }
5408              m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
5409              pthread_mutex_unlock(&gCamLock);
5410  
5411              rc = mCameraHandle->ops->set_dual_cam_cmd(
5412                      mCameraHandle->camera_handle);
5413              if (rc < 0) {
5414                  LOGE("Dualcam: link failed");
5415                  pthread_mutex_unlock(&mMutex);
5416                  goto error_exit;
5417              }
5418          }
5419          goto no_error;
5420  error_exit:
5421          mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
5422          return rc;
5423  no_error:
5424          mWokenUpByDaemon = false;
5425          mPendingLiveRequest = 0;
5426          mFirstConfiguration = false;
5427      }
5428  
5429      uint32_t frameNumber = request->frame_number;
5430      cam_stream_ID_t streamsArray;
5431  
5432      if (mFlushPerf) {
5433          //we cannot accept any requests during flush
5434          LOGE("process_capture_request cannot proceed during flush");
5435          pthread_mutex_unlock(&mMutex);
5436          return NO_ERROR; //should return an error
5437      }
5438  
5439      if (meta.exists(ANDROID_REQUEST_ID)) {
5440          request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5441          mCurrentRequestId = request_id;
5442          LOGD("Received request with id: %d", request_id);
5443      } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5444          LOGE("Unable to find request id field, \
5445                  & no previous id available");
5446          pthread_mutex_unlock(&mMutex);
5447          return NAME_NOT_FOUND;
5448      } else {
5449          LOGD("Re-using old request id");
5450          request_id = mCurrentRequestId;
5451      }
5452  
5453      LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5454                                      request->num_output_buffers,
5455                                      request->input_buffer,
5456                                      frameNumber);
5457      // Acquire all request buffers first
5458      streamsArray.num_streams = 0;
5459      int blob_request = 0;
5460      bool depthRequestPresent = false;
5461      uint32_t snapshotStreamId = 0;
5462      for (size_t i = 0; i < request->num_output_buffers; i++) {
5463          const camera3_stream_buffer_t& output = request->output_buffers[i];
5464          QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5465  
5466          if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5467                  (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
5468              //FIXME??:Call function to store local copy of jpeg data for encode params.
5469              blob_request = 1;
5470              snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5471          }
5472  
5473          if (output.acquire_fence != -1) {
5474             rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5475             close(output.acquire_fence);
5476             if (rc != OK) {
5477                LOGE("sync wait failed %d", rc);
5478                pthread_mutex_unlock(&mMutex);
5479                return rc;
5480             }
5481          }
5482  
5483          if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5484                  (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
5485              depthRequestPresent = true;
5486              continue;
5487          }
5488  
5489          streamsArray.stream_request[streamsArray.num_streams++].streamID =
5490              channel->getStreamID(channel->getStreamTypeMask());
5491  
5492          if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5493              isVidBufRequested = true;
5494          }
5495      }
5496  
5497      //FIXME: Add checks to ensure to dups in validateCaptureRequest
5498      for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5499            itr++) {
5500          QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5501          streamsArray.stream_request[streamsArray.num_streams++].streamID =
5502              channel->getStreamID(channel->getStreamTypeMask());
5503  
5504          if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5505              isVidBufRequested = true;
5506          }
5507      }
5508  
5509      if (blob_request) {
5510          ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
5511          mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
5512      }
5513      if (blob_request && mRawDumpChannel) {
5514          LOGD("Trigger Raw based on blob request if Raw dump is enabled");
5515          streamsArray.stream_request[streamsArray.num_streams].streamID =
5516              mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
5517          streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5518      }
5519  
5520      {
5521          Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5522          // Request a RAW buffer if
5523          //  1. mHdrPlusRawSrcChannel is valid.
5524          //  2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5525          //  3. There is no pending HDR+ request.
5526          if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5527                  mHdrPlusPendingRequests.size() == 0) {
5528              streamsArray.stream_request[streamsArray.num_streams].streamID =
5529                  mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5530              streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5531          }
5532      }
5533  
5534      //extract capture intent
5535      if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5536          mCaptureIntent =
5537                  meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5538      }
5539  
5540      if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5541          mCacMode =
5542                  meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5543      }
5544  
5545      uint8_t requestedLensShadingMapMode;
5546      // Get the shading map mode.
5547      if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5548          mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5549                  meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5550      } else {
5551          requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5552      }
5553  
5554      if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5555          mLastRequestedFaceDetectMode =
5556                  meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5557      }
5558  
5559      if (meta.exists(ANDROID_STATISTICS_OIS_DATA_MODE)) {
5560          mLastRequestedOisDataMode =
5561                  meta.find(ANDROID_STATISTICS_OIS_DATA_MODE).data.u8[0];
5562      }
5563  
5564      bool hdrPlusRequest = false;
5565      HdrPlusPendingRequest pendingHdrPlusRequest = {};
5566  
5567      {
5568          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5569          // If this request has a still capture intent, try to submit an HDR+ request.
5570          if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5571                  mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5572              hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5573          }
5574      }
5575  
5576      if (hdrPlusRequest) {
5577          // For a HDR+ request, just set the frame parameters.
5578          rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5579          if (rc < 0) {
5580              LOGE("fail to set frame parameters");
5581              pthread_mutex_unlock(&mMutex);
5582              return rc;
5583          }
5584      } else if(request->input_buffer == NULL) {
5585          /* Parse the settings:
5586           * - For every request in NORMAL MODE
5587           * - For every request in HFR mode during preview only case
5588           * - For first request of every batch in HFR mode during video
5589           * recording. In batchmode the same settings except frame number is
5590           * repeated in each request of the batch.
5591           */
5592          if (!mBatchSize ||
5593             (mBatchSize && !isVidBufRequested) ||
5594             (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
5595              rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5596              if (rc < 0) {
5597                  LOGE("fail to set frame parameters");
5598                  pthread_mutex_unlock(&mMutex);
5599                  return rc;
5600              }
5601  
5602              {
5603                  // If HDR+ mode is enabled, override the following modes so the necessary metadata
5604                  // will be included in the result metadata sent to Easel HDR+.
5605                  std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5606                  if (mHdrPlusModeEnabled) {
5607                      ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5608                          ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5609                      ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
5610                          ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5611                  }
5612              }
5613          }
5614          /* For batchMode HFR, setFrameParameters is not called for every
5615           * request. But only frame number of the latest request is parsed.
5616           * Keep track of first and last frame numbers in a batch so that
5617           * metadata for the frame numbers of batch can be duplicated in
5618           * handleBatchMetadta */
5619          if (mBatchSize) {
5620              if (!mToBeQueuedVidBufs) {
5621                  //start of the batch
5622                  mFirstFrameNumberInBatch = request->frame_number;
5623              }
5624              if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5625                  CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5626                  LOGE("Failed to set the frame number in the parameters");
5627                  pthread_mutex_unlock(&mMutex);
5628                  return BAD_VALUE;
5629              }
5630          }
5631          if (mNeedSensorRestart) {
5632              /* Unlock the mutex as restartSensor waits on the channels to be
5633               * stopped, which in turn calls stream callback functions -
5634               * handleBufferWithLock and handleMetadataWithLock */
5635              pthread_mutex_unlock(&mMutex);
5636              rc = dynamicUpdateMetaStreamInfo();
5637              if (rc != NO_ERROR) {
5638                  LOGE("Restarting the sensor failed");
5639                  return BAD_VALUE;
5640              }
5641              mNeedSensorRestart = false;
5642              pthread_mutex_lock(&mMutex);
5643          }
5644          if(mResetInstantAEC) {
5645              ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5646                      CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5647              mResetInstantAEC = false;
5648          }
5649      } else {
5650          if (request->input_buffer->acquire_fence != -1) {
5651             rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5652             close(request->input_buffer->acquire_fence);
5653             if (rc != OK) {
5654                LOGE("input buffer sync wait failed %d", rc);
5655                pthread_mutex_unlock(&mMutex);
5656                return rc;
5657             }
5658          }
5659      }
5660  
5661      if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5662          mLastCustIntentFrmNum = frameNumber;
5663      }
5664      /* Update pending request list and pending buffers map */
5665      PendingRequestInfo pendingRequest = {};
5666      pendingRequestIterator latestRequest;
5667      pendingRequest.frame_number = frameNumber;
5668      pendingRequest.num_buffers = depthRequestPresent ?
5669              (request->num_output_buffers - 1 ) : request->num_output_buffers;
5670      pendingRequest.request_id = request_id;
5671      pendingRequest.blob_request = blob_request;
5672      pendingRequest.timestamp = 0;
5673      pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
5674      pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
5675      pendingRequest.requestedOisDataMode = mLastRequestedOisDataMode;
5676      if (request->input_buffer) {
5677          pendingRequest.input_buffer =
5678                  (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5679          *(pendingRequest.input_buffer) = *(request->input_buffer);
5680          pInputBuffer = pendingRequest.input_buffer;
5681      } else {
5682         pendingRequest.input_buffer = NULL;
5683         pInputBuffer = NULL;
5684      }
5685      pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
5686  
5687      pendingRequest.pipeline_depth = 0;
5688      pendingRequest.partial_result_cnt = 0;
5689      extractJpegMetadata(mCurJpegMeta, request);
5690      pendingRequest.jpegMetadata = mCurJpegMeta;
5691      pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5692      pendingRequest.capture_intent = mCaptureIntent;
5693      if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5694          pendingRequest.hybrid_ae_enable =
5695                  meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5696      }
5697  
5698      if (meta.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
5699          pendingRequest.motion_detection_enable =
5700                  meta.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8[0];
5701      }
5702  
5703      /* DevCamDebug metadata processCaptureRequest */
5704      if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5705          mDevCamDebugMetaEnable =
5706                  meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5707      }
5708      pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5709      /* DevCamDebug metadata end */
5710  
5711      //extract CAC info
5712      if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5713          mCacMode =
5714                  meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5715      }
5716      pendingRequest.fwkCacMode = mCacMode;
5717      pendingRequest.hdrplus = hdrPlusRequest;
5718      // We need to account for several dropped frames initially on sensor side.
5719      pendingRequest.expectedFrameDuration = (mState == CONFIGURED) ? (4 * mExpectedFrameDuration) :
5720          mExpectedFrameDuration;
5721      mExpectedInflightDuration += pendingRequest.expectedFrameDuration;
5722  
5723      // extract enableZsl info
5724      if (gExposeEnableZslKey) {
5725          if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5726              pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5727              mZslEnabled = pendingRequest.enableZsl;
5728          } else {
5729              pendingRequest.enableZsl = mZslEnabled;
5730          }
5731      }
5732  
5733      PendingBuffersInRequest bufsForCurRequest;
5734      bufsForCurRequest.frame_number = frameNumber;
5735      // Mark current timestamp for the new request
5736      bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
5737      bufsForCurRequest.av_timestamp = 0;
5738      bufsForCurRequest.hdrplus = hdrPlusRequest;
5739  
5740      if (hdrPlusRequest) {
5741          // Save settings for this request.
5742          pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5743          memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5744  
5745          // Add to pending HDR+ request queue.
5746          Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5747          mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5748  
5749          ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5750      }
5751  
5752      for (size_t i = 0; i < request->num_output_buffers; i++) {
5753          if ((request->output_buffers[i].stream->data_space ==
5754                  HAL_DATASPACE_DEPTH) &&
5755                  (HAL_PIXEL_FORMAT_BLOB ==
5756                          request->output_buffers[i].stream->format)) {
5757              continue;
5758          }
5759          RequestedBufferInfo requestedBuf;
5760          memset(&requestedBuf, 0, sizeof(requestedBuf));
5761          requestedBuf.stream = request->output_buffers[i].stream;
5762          requestedBuf.buffer = NULL;
5763          pendingRequest.buffers.push_back(requestedBuf);
5764  
5765          // Add to buffer handle the pending buffers list
5766          PendingBufferInfo bufferInfo;
5767          bufferInfo.buffer = request->output_buffers[i].buffer;
5768          bufferInfo.stream = request->output_buffers[i].stream;
5769          bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5770          QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5771          LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5772              frameNumber, bufferInfo.buffer,
5773              channel->getStreamTypeMask(), bufferInfo.stream->format);
5774      }
5775      // Add this request packet into mPendingBuffersMap
5776      mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5777      LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5778          mPendingBuffersMap.get_num_overall_buffers());
5779  
5780      latestRequest = mPendingRequestsList.insert(
5781              mPendingRequestsList.end(), pendingRequest);
5782  
5783      // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5784      // for the frame number.
5785      mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
5786      for (size_t i = 0; i < request->num_output_buffers; i++) {
5787          mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5788      }
5789  
5790      if(mFlush) {
5791          LOGI("mFlush is true");
5792          pthread_mutex_unlock(&mMutex);
5793          return NO_ERROR;
5794      }
5795  
5796      // If this is not an HDR+ request, send the request to metadata and each output buffer's
5797      // channel.
5798      if (!hdrPlusRequest) {
5799          int indexUsed;
5800          // Notify metadata channel we receive a request
5801          mMetadataChannel->request(NULL, frameNumber, indexUsed);
5802  
5803          if(request->input_buffer != NULL){
5804              LOGD("Input request, frame_number %d", frameNumber);
5805              rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5806              if (NO_ERROR != rc) {
5807                  LOGE("fail to set reproc parameters");
5808                  pthread_mutex_unlock(&mMutex);
5809                  return rc;
5810              }
5811          }
5812  
5813          // Call request on other streams
5814          uint32_t streams_need_metadata = 0;
5815          pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5816          for (size_t i = 0; i < request->num_output_buffers; i++) {
5817              const camera3_stream_buffer_t& output = request->output_buffers[i];
5818              QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5819  
5820              if (channel == NULL) {
5821                  LOGW("invalid channel pointer for stream");
5822                  continue;
5823              }
5824  
5825              if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5826                  LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5827                            output.buffer, request->input_buffer, frameNumber);
5828                  if(request->input_buffer != NULL){
5829                      rc = channel->request(output.buffer, frameNumber,
5830                              pInputBuffer, &mReprocMeta, indexUsed, false, false);
5831                      if (rc < 0) {
5832                          LOGE("Fail to request on picture channel");
5833                          pthread_mutex_unlock(&mMutex);
5834                          return rc;
5835                      }
5836                  } else {
5837                      if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5838                          assert(NULL != mDepthChannel);
5839                          assert(mDepthChannel == output.stream->priv);
5840  
5841                          rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5842                          if (rc < 0) {
5843                              LOGE("Fail to map on depth buffer");
5844                              pthread_mutex_unlock(&mMutex);
5845                              return rc;
5846                          }
5847                          continue;
5848                      } else {
5849                          LOGD("snapshot request with buffer %p, frame_number %d",
5850                                   output.buffer, frameNumber);
5851                          if (!request->settings) {
5852                              rc = channel->request(output.buffer, frameNumber,
5853                                      NULL, mPrevParameters, indexUsed);
5854                          } else {
5855                              rc = channel->request(output.buffer, frameNumber,
5856                                      NULL, mParameters, indexUsed);
5857                          }
5858                          if (rc < 0) {
5859                              LOGE("Fail to request on picture channel");
5860                              pthread_mutex_unlock(&mMutex);
5861                              return rc;
5862                          }
5863  
5864                          uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5865                          uint32_t j = 0;
5866                          for (j = 0; j < streamsArray.num_streams; j++) {
5867                              if (streamsArray.stream_request[j].streamID == streamId) {
5868                                  if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5869                                      streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5870                                  else
5871                                      streamsArray.stream_request[j].buf_index = indexUsed;
5872                                  break;
5873                              }
5874                          }
5875                          if (j == streamsArray.num_streams) {
5876                              LOGE("Did not find matching stream to update index");
5877                              assert(0);
5878                          }
5879  
5880                          pendingBufferIter->need_metadata = true;
5881                          streams_need_metadata++;
5882                      }
5883                  }
5884              } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5885                  bool needMetadata = false;
5886                  QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5887                  rc = yuvChannel->request(output.buffer, frameNumber,
5888                          pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5889                          needMetadata, indexUsed, false, false);
5890                  if (rc < 0) {
5891                      LOGE("Fail to request on YUV channel");
5892                      pthread_mutex_unlock(&mMutex);
5893                      return rc;
5894                  }
5895  
5896                  uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5897                  uint32_t j = 0;
5898                  for (j = 0; j < streamsArray.num_streams; j++) {
5899                      if (streamsArray.stream_request[j].streamID == streamId) {
5900                          if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5901                              streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5902                          else
5903                              streamsArray.stream_request[j].buf_index = indexUsed;
5904                          break;
5905                      }
5906                  }
5907                  if (j == streamsArray.num_streams) {
5908                      LOGE("Did not find matching stream to update index");
5909                      assert(0);
5910                  }
5911  
5912                  pendingBufferIter->need_metadata = needMetadata;
5913                  if (needMetadata)
5914                      streams_need_metadata += 1;
5915                  LOGD("calling YUV channel request, need_metadata is %d",
5916                           needMetadata);
5917              } else {
5918                  LOGD("request with buffer %p, frame_number %d",
5919                        output.buffer, frameNumber);
5920  
5921                  rc = channel->request(output.buffer, frameNumber, indexUsed);
5922  
5923                  uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5924                  uint32_t j = 0;
5925                  for (j = 0; j < streamsArray.num_streams; j++) {
5926                      if (streamsArray.stream_request[j].streamID == streamId) {
5927                          if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5928                              streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5929                          else
5930                              streamsArray.stream_request[j].buf_index = indexUsed;
5931                          break;
5932                      }
5933                  }
5934                  if (j == streamsArray.num_streams) {
5935                      LOGE("Did not find matching stream to update index");
5936                      assert(0);
5937                  }
5938  
5939                  if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5940                          && mBatchSize) {
5941                      mToBeQueuedVidBufs++;
5942                      if (mToBeQueuedVidBufs == mBatchSize) {
5943                          channel->queueBatchBuf();
5944                      }
5945                  }
5946                  if (rc < 0) {
5947                      LOGE("request failed");
5948                      pthread_mutex_unlock(&mMutex);
5949                      return rc;
5950                  }
5951              }
5952              pendingBufferIter++;
5953          }
5954  
5955          for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5956                itr++) {
5957              QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5958  
5959              if (channel == NULL) {
5960                  LOGE("invalid channel pointer for stream");
5961                  assert(0);
5962                  pthread_mutex_unlock(&mMutex);
5963                  return BAD_VALUE;
5964              }
5965  
5966              InternalRequest requestedStream;
5967              requestedStream = (*itr);
5968  
5969  
5970              if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5971                  LOGD("snapshot request internally input buffer %p, frame_number %d",
5972                            request->input_buffer, frameNumber);
5973                  if(request->input_buffer != NULL){
5974                      rc = channel->request(NULL, frameNumber,
5975                              pInputBuffer, &mReprocMeta, indexUsed, true,
5976                              requestedStream.meteringOnly);
5977                      if (rc < 0) {
5978                          LOGE("Fail to request on picture channel");
5979                          pthread_mutex_unlock(&mMutex);
5980                          return rc;
5981                      }
5982                  } else {
5983                      LOGD("snapshot request with frame_number %d", frameNumber);
5984                      if (!request->settings) {
5985                          rc = channel->request(NULL, frameNumber,
5986                                  NULL, mPrevParameters, indexUsed, true,
5987                                  requestedStream.meteringOnly);
5988                      } else {
5989                          rc = channel->request(NULL, frameNumber,
5990                                  NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5991                      }
5992                      if (rc < 0) {
5993                          LOGE("Fail to request on picture channel");
5994                          pthread_mutex_unlock(&mMutex);
5995                          return rc;
5996                      }
5997  
5998                      if ((*itr).meteringOnly != 1) {
5999                          requestedStream.need_metadata = 1;
6000                          streams_need_metadata++;
6001                      }
6002                  }
6003  
6004                  uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6005                  uint32_t j = 0;
6006                  for (j = 0; j < streamsArray.num_streams; j++) {
6007                      if (streamsArray.stream_request[j].streamID == streamId) {
6008                        if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6009                            streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6010                        else
6011                            streamsArray.stream_request[j].buf_index = indexUsed;
6012                          break;
6013                      }
6014                  }
6015                  if (j == streamsArray.num_streams) {
6016                      LOGE("Did not find matching stream to update index");
6017                      assert(0);
6018                  }
6019  
6020              } else {
6021                  LOGE("Internal requests not supported on this stream type");
6022                  assert(0);
6023                  pthread_mutex_unlock(&mMutex);
6024                  return INVALID_OPERATION;
6025              }
6026              latestRequest->internalRequestList.push_back(requestedStream);
6027          }
6028  
6029          //If 2 streams have need_metadata set to true, fail the request, unless
6030          //we copy/reference count the metadata buffer
6031          if (streams_need_metadata > 1) {
6032              LOGE("not supporting request in which two streams requires"
6033                      " 2 HAL metadata for reprocessing");
6034              pthread_mutex_unlock(&mMutex);
6035              return -EINVAL;
6036          }
6037  
6038          cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
6039                  CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
6040          if (depthRequestPresent && mDepthChannel) {
6041              if (request->settings) {
6042                  camera_metadata_ro_entry entry;
6043                  if (find_camera_metadata_ro_entry(request->settings,
6044                          NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
6045                      if (entry.data.u8[0]) {
6046                          pdafEnable = CAM_PD_DATA_ENABLED;
6047                      } else {
6048                          pdafEnable = CAM_PD_DATA_SKIP;
6049                      }
6050                      mDepthCloudMode = pdafEnable;
6051                  } else {
6052                      pdafEnable = mDepthCloudMode;
6053                  }
6054              } else {
6055                  pdafEnable = mDepthCloudMode;
6056              }
6057          }
6058  
6059          if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
6060                  CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
6061              LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
6062              pthread_mutex_unlock(&mMutex);
6063              return BAD_VALUE;
6064          }
6065  
6066          if (request->input_buffer == NULL) {
6067              /* Set the parameters to backend:
6068               * - For every request in NORMAL MODE
6069               * - For every request in HFR mode during preview only case
6070               * - Once every batch in HFR mode during video recording
6071               */
6072              if (!mBatchSize ||
6073                 (mBatchSize && !isVidBufRequested) ||
6074                 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
6075                  LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
6076                           mBatchSize, isVidBufRequested,
6077                          mToBeQueuedVidBufs);
6078  
6079                  if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
6080                      for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6081                          uint32_t m = 0;
6082                          for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6083                              if (streamsArray.stream_request[k].streamID ==
6084                                      mBatchedStreamsArray.stream_request[m].streamID)
6085                                  break;
6086                              }
6087                              if (m == mBatchedStreamsArray.num_streams) {
6088                                  mBatchedStreamsArray.stream_request\
6089                                      [mBatchedStreamsArray.num_streams].streamID =
6090                                      streamsArray.stream_request[k].streamID;
6091                                  mBatchedStreamsArray.stream_request\
6092                                      [mBatchedStreamsArray.num_streams].buf_index =
6093                                      streamsArray.stream_request[k].buf_index;
6094                                  mBatchedStreamsArray.num_streams =
6095                                      mBatchedStreamsArray.num_streams + 1;
6096                              }
6097                      }
6098                      streamsArray = mBatchedStreamsArray;
6099                  }
6100                  /* Update stream id of all the requested buffers */
6101                  if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6102                          streamsArray)) {
6103                      LOGE("Failed to set stream type mask in the parameters");
6104                      pthread_mutex_unlock(&mMutex);
6105                      return BAD_VALUE;
6106                  }
6107  
6108                  rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6109                          mParameters);
6110                  if (rc < 0) {
6111                      LOGE("set_parms failed");
6112                  }
6113                  /* reset to zero coz, the batch is queued */
6114                  mToBeQueuedVidBufs = 0;
6115                  mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6116                  memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6117              } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
6118                  for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6119                      uint32_t m = 0;
6120                      for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6121                          if (streamsArray.stream_request[k].streamID ==
6122                                  mBatchedStreamsArray.stream_request[m].streamID)
6123                              break;
6124                      }
6125                      if (m == mBatchedStreamsArray.num_streams) {
6126                          mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6127                                  streamID = streamsArray.stream_request[k].streamID;
6128                          mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6129                                  buf_index = streamsArray.stream_request[k].buf_index;
6130                          mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6131                      }
6132                  }
6133              }
6134              mPendingLiveRequest++;
6135  
6136              // Start all streams after the first setting is sent, so that the
6137              // setting can be applied sooner: (0 + apply_delay)th frame.
6138              if (mState == CONFIGURED && mChannelHandle) {
6139                  //Then start them.
6140                  LOGH("Start META Channel");
6141                  rc = mMetadataChannel->start();
6142                  if (rc < 0) {
6143                      LOGE("META channel start failed");
6144                      pthread_mutex_unlock(&mMutex);
6145                      return rc;
6146                  }
6147  
6148                  if (mAnalysisChannel) {
6149                      rc = mAnalysisChannel->start();
6150                      if (rc < 0) {
6151                          LOGE("Analysis channel start failed");
6152                          mMetadataChannel->stop();
6153                          pthread_mutex_unlock(&mMutex);
6154                          return rc;
6155                      }
6156                  }
6157  
6158                  if (mSupportChannel) {
6159                      rc = mSupportChannel->start();
6160                      if (rc < 0) {
6161                          LOGE("Support channel start failed");
6162                          mMetadataChannel->stop();
6163                          /* Although support and analysis are mutually exclusive today
6164                             adding it in anycase for future proofing */
6165                          if (mAnalysisChannel) {
6166                              mAnalysisChannel->stop();
6167                          }
6168                          pthread_mutex_unlock(&mMutex);
6169                          return rc;
6170                      }
6171                  }
6172                  for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6173                          it != mStreamInfo.end(); it++) {
6174                      QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6175                      LOGH("Start Processing Channel mask=%d",
6176                              channel->getStreamTypeMask());
6177                      rc = channel->start();
6178                      if (rc < 0) {
6179                          LOGE("channel start failed");
6180                          pthread_mutex_unlock(&mMutex);
6181                          return rc;
6182                      }
6183                  }
6184  
6185                  if (mRawDumpChannel) {
6186                      LOGD("Starting raw dump stream");
6187                      rc = mRawDumpChannel->start();
6188                      if (rc != NO_ERROR) {
6189                          LOGE("Error Starting Raw Dump Channel");
6190                          for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6191                                it != mStreamInfo.end(); it++) {
6192                              QCamera3Channel *channel =
6193                                  (QCamera3Channel *)(*it)->stream->priv;
6194                              LOGH("Stopping Processing Channel mask=%d",
6195                                  channel->getStreamTypeMask());
6196                              channel->stop();
6197                          }
6198                          if (mSupportChannel)
6199                              mSupportChannel->stop();
6200                          if (mAnalysisChannel) {
6201                              mAnalysisChannel->stop();
6202                          }
6203                          mMetadataChannel->stop();
6204                          pthread_mutex_unlock(&mMutex);
6205                          return rc;
6206                      }
6207                  }
6208  
6209                  // Configure modules for stream on.
6210                  rc = startChannelLocked();
6211                  if (rc != NO_ERROR) {
6212                      LOGE("startChannelLocked failed %d", rc);
6213                      pthread_mutex_unlock(&mMutex);
6214                      return rc;
6215                  }
6216              }
6217          }
6218      }
6219  
6220      // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
6221      {
6222          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6223          if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
6224                  !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6225                  meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6226                  meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6227                  ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6228                  meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6229                  meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
6230  
6231              if (isSessionHdrPlusModeCompatible()) {
6232                  rc = enableHdrPlusModeLocked();
6233                  if (rc != OK) {
6234                      LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6235                      pthread_mutex_unlock(&mMutex);
6236                      return rc;
6237                  }
6238              }
6239  
6240              mFirstPreviewIntentSeen = true;
6241          }
6242      }
6243  
6244      LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6245  
6246      mState = STARTED;
6247      // Added a timed condition wait
6248      struct timespec ts;
6249      uint8_t isValidTimeout = 1;
6250      rc = clock_gettime(CLOCK_MONOTONIC, &ts);
6251      if (rc < 0) {
6252        isValidTimeout = 0;
6253        LOGE("Error reading the real time clock!!");
6254      }
6255      else {
6256        // Make timeout as 5 sec for request to be honored
6257        int64_t timeout = 5;
6258        {
6259            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6260            // If there is a pending HDR+ request, the following requests may be blocked until the
6261            // HDR+ request is done. So allow a longer timeout.
6262            if (mHdrPlusPendingRequests.size() > 0) {
6263                timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6264            }
6265        }
6266        ts.tv_sec += timeout;
6267      }
6268      //Block on conditional variable
6269      while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
6270              (mState != ERROR) && (mState != DEINIT)) {
6271          if (!isValidTimeout) {
6272              LOGD("Blocking on conditional wait");
6273              pthread_cond_wait(&mRequestCond, &mMutex);
6274          }
6275          else {
6276              LOGD("Blocking on timed conditional wait");
6277              rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6278              if (rc == ETIMEDOUT) {
6279                  rc = -ENODEV;
6280                  LOGE("Unblocked on timeout!!!!");
6281                  break;
6282              }
6283          }
6284          LOGD("Unblocked");
6285          if (mWokenUpByDaemon) {
6286              mWokenUpByDaemon = false;
6287              if (mPendingLiveRequest < mMaxInFlightRequests)
6288                  break;
6289          }
6290      }
6291      pthread_mutex_unlock(&mMutex);
6292  
6293      return rc;
6294  }
6295  
startChannelLocked()6296  int32_t QCamera3HardwareInterface::startChannelLocked()
6297  {
6298      // Configure modules for stream on.
6299      int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6300              mChannelHandle, /*start_sensor_streaming*/false);
6301      if (rc != NO_ERROR) {
6302          LOGE("start_channel failed %d", rc);
6303          return rc;
6304      }
6305  
6306      {
6307          // Configure Easel for stream on.
6308          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6309          if (EaselManagerClientOpened) {
6310              // Now that sensor mode should have been selected, get the selected sensor mode
6311              // info.
6312              memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6313              rc = getCurrentSensorModeInfo(mSensorModeInfo);
6314              if (rc != NO_ERROR) {
6315                  ALOGE("%s: Get current sensor mode failed, bail out: %s (%d).", __FUNCTION__,
6316                          strerror(-rc), rc);
6317                  return rc;
6318              }
6319              logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6320              rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6321                      /*enableCapture*/true);
6322              if (rc != OK) {
6323                  ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6324                          mCameraId, mSensorModeInfo.op_pixel_clk);
6325                  return rc;
6326              }
6327              logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6328              mEaselMipiStarted = true;
6329          }
6330      }
6331  
6332      // Start sensor streaming.
6333      rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6334              mChannelHandle);
6335      if (rc != NO_ERROR) {
6336          LOGE("start_sensor_stream_on failed %d", rc);
6337          return rc;
6338      }
6339  
6340      return 0;
6341  }
6342  
stopChannelLocked(bool stopChannelImmediately)6343  void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6344  {
6345      mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6346              mChannelHandle, stopChannelImmediately);
6347  
6348      {
6349          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6350          if (EaselManagerClientOpened && mEaselMipiStarted) {
6351              int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6352              if (rc != 0) {
6353                  ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6354              }
6355              mEaselMipiStarted = false;
6356          }
6357      }
6358  }
6359  
6360  /*===========================================================================
6361   * FUNCTION   : dump
6362   *
6363   * DESCRIPTION:
6364   *
6365   * PARAMETERS :
6366   *
6367   *
6368   * RETURN     :
6369   *==========================================================================*/
dump(int fd)6370  void QCamera3HardwareInterface::dump(int fd)
6371  {
6372      pthread_mutex_lock(&mMutex);
6373      dprintf(fd, "\n Camera HAL3 information Begin \n");
6374  
6375      dprintf(fd, "\nNumber of pending requests: %zu \n",
6376          mPendingRequestsList.size());
6377      dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6378      dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
6379      dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6380      for(pendingRequestIterator i = mPendingRequestsList.begin();
6381              i != mPendingRequestsList.end(); i++) {
6382          dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6383          i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6384          i->input_buffer);
6385      }
6386      dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6387                  mPendingBuffersMap.get_num_overall_buffers());
6388      dprintf(fd, "-------+------------------\n");
6389      dprintf(fd, " Frame | Stream type mask \n");
6390      dprintf(fd, "-------+------------------\n");
6391      for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6392          for(auto &j : req.mPendingBufferList) {
6393              QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6394              dprintf(fd, " %5d | %11d \n",
6395                      req.frame_number, channel->getStreamTypeMask());
6396          }
6397      }
6398      dprintf(fd, "-------+------------------\n");
6399  
6400      dprintf(fd, "\nPending frame drop list: %zu\n",
6401          mPendingFrameDropList.size());
6402      dprintf(fd, "-------+-----------\n");
6403      dprintf(fd, " Frame | Stream ID \n");
6404      dprintf(fd, "-------+-----------\n");
6405      for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6406          i != mPendingFrameDropList.end(); i++) {
6407          dprintf(fd, " %5d | %9d \n",
6408              i->frame_number, i->stream_ID);
6409      }
6410      dprintf(fd, "-------+-----------\n");
6411  
6412      dprintf(fd, "\n Camera HAL3 information End \n");
6413  
6414      /* use dumpsys media.camera as trigger to send update debug level event */
6415      mUpdateDebugLevel = true;
6416      pthread_mutex_unlock(&mMutex);
6417      return;
6418  }
6419  
6420  /*===========================================================================
6421   * FUNCTION   : flush
6422   *
6423   * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6424   *              conditionally restarts channels
6425   *
6426   * PARAMETERS :
6427   *  @ restartChannels: re-start all channels
6428   *  @ stopChannelImmediately: stop the channel immediately. This should be used
6429   *                            when device encountered an error and MIPI may has
6430   *                            been stopped.
6431   *
6432   * RETURN     :
6433   *          0 on success
6434   *          Error code on failure
6435   *==========================================================================*/
flush(bool restartChannels,bool stopChannelImmediately)6436  int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
6437  {
6438      KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6439      int32_t rc = NO_ERROR;
6440  
6441      LOGD("Unblocking Process Capture Request");
6442      pthread_mutex_lock(&mMutex);
6443      mFlush = true;
6444      pthread_mutex_unlock(&mMutex);
6445  
6446      // Disable HDR+ if it's enabled;
6447      {
6448          std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6449          finishHdrPlusClientOpeningLocked(l);
6450          disableHdrPlusModeLocked();
6451      }
6452  
6453      rc = stopAllChannels();
6454      // unlink of dualcam
6455      if (mIsDeviceLinked) {
6456          cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6457                  &m_pDualCamCmdPtr->bundle_info;
6458          m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
6459          m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6460          pthread_mutex_lock(&gCamLock);
6461  
6462          if (mIsMainCamera == 1) {
6463              m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6464              m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
6465              m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6466              // related session id should be session id of linked session
6467              m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6468          } else {
6469              m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6470              m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
6471              m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6472              m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6473          }
6474          m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
6475          pthread_mutex_unlock(&gCamLock);
6476  
6477          rc = mCameraHandle->ops->set_dual_cam_cmd(
6478                  mCameraHandle->camera_handle);
6479          if (rc < 0) {
6480              LOGE("Dualcam: Unlink failed, but still proceed to close");
6481          }
6482      }
6483  
6484      if (rc < 0) {
6485          LOGE("stopAllChannels failed");
6486          return rc;
6487      }
6488      if (mChannelHandle) {
6489          stopChannelLocked(stopChannelImmediately);
6490      }
6491  
6492      // Reset bundle info
6493      rc = setBundleInfo();
6494      if (rc < 0) {
6495          LOGE("setBundleInfo failed %d", rc);
6496          return rc;
6497      }
6498  
6499      // Mutex Lock
6500      pthread_mutex_lock(&mMutex);
6501  
6502      // Unblock process_capture_request
6503      mPendingLiveRequest = 0;
6504      pthread_cond_signal(&mRequestCond);
6505  
6506      rc = notifyErrorForPendingRequests();
6507      if (rc < 0) {
6508          LOGE("notifyErrorForPendingRequests failed");
6509          pthread_mutex_unlock(&mMutex);
6510          return rc;
6511      }
6512  
6513      mFlush = false;
6514  
6515      // Start the Streams/Channels
6516      if (restartChannels) {
6517          rc = startAllChannels();
6518          if (rc < 0) {
6519              LOGE("startAllChannels failed");
6520              pthread_mutex_unlock(&mMutex);
6521              return rc;
6522          }
6523          if (mChannelHandle) {
6524              // Configure modules for stream on.
6525              rc = startChannelLocked();
6526              if (rc < 0) {
6527                  LOGE("startChannelLocked failed");
6528                  pthread_mutex_unlock(&mMutex);
6529                  return rc;
6530              }
6531          }
6532          mFirstPreviewIntentSeen = false;
6533      }
6534      pthread_mutex_unlock(&mMutex);
6535  
6536      return 0;
6537  }
6538  
6539  /*===========================================================================
6540   * FUNCTION   : flushPerf
6541   *
6542   * DESCRIPTION: This is the performance optimization version of flush that does
6543   *              not use stream off, rather flushes the system
6544   *
6545   * PARAMETERS :
6546   *
6547   *
6548   * RETURN     : 0 : success
6549   *              -EINVAL: input is malformed (device is not valid)
6550   *              -ENODEV: if the device has encountered a serious error
6551   *==========================================================================*/
flushPerf()6552  int QCamera3HardwareInterface::flushPerf()
6553  {
6554      KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6555      int32_t rc = 0;
6556      struct timespec timeout;
6557      bool timed_wait = false;
6558  
6559      pthread_mutex_lock(&mMutex);
6560      mFlushPerf = true;
6561      mPendingBuffersMap.numPendingBufsAtFlush =
6562          mPendingBuffersMap.get_num_overall_buffers();
6563      LOGD("Calling flush. Wait for %d buffers to return",
6564          mPendingBuffersMap.numPendingBufsAtFlush);
6565  
6566      /* send the flush event to the backend */
6567      rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6568      if (rc < 0) {
6569          LOGE("Error in flush: IOCTL failure");
6570          mFlushPerf = false;
6571          pthread_mutex_unlock(&mMutex);
6572          return -ENODEV;
6573      }
6574  
6575      if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6576          LOGD("No pending buffers in HAL, return flush");
6577          mFlushPerf = false;
6578          pthread_mutex_unlock(&mMutex);
6579          return rc;
6580      }
6581  
6582      /* wait on a signal that buffers were received */
6583      rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
6584      if (rc < 0) {
6585          LOGE("Error reading the real time clock, cannot use timed wait");
6586      } else {
6587          timeout.tv_sec += FLUSH_TIMEOUT;
6588          timed_wait = true;
6589      }
6590  
6591      //Block on conditional variable
6592      while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6593          LOGD("Waiting on mBuffersCond");
6594          if (!timed_wait) {
6595              rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6596              if (rc != 0) {
6597                   LOGE("pthread_cond_wait failed due to rc = %s",
6598                          strerror(rc));
6599                   break;
6600              }
6601          } else {
6602              rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6603              if (rc != 0) {
6604                  LOGE("pthread_cond_timedwait failed due to rc = %s",
6605                              strerror(rc));
6606                  break;
6607              }
6608          }
6609      }
6610      if (rc != 0) {
6611          mFlushPerf = false;
6612          pthread_mutex_unlock(&mMutex);
6613          return -ENODEV;
6614      }
6615  
6616      LOGD("Received buffers, now safe to return them");
6617  
6618      //make sure the channels handle flush
6619      //currently only required for the picture channel to release snapshot resources
6620      for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6621              it != mStreamInfo.end(); it++) {
6622          QCamera3Channel *channel = (*it)->channel;
6623          if (channel) {
6624              rc = channel->flush();
6625              if (rc) {
6626                 LOGE("Flushing the channels failed with error %d", rc);
6627                 // even though the channel flush failed we need to continue and
6628                 // return the buffers we have to the framework, however the return
6629                 // value will be an error
6630                 rc = -ENODEV;
6631              }
6632          }
6633      }
6634  
6635      /* notify the frameworks and send errored results */
6636      rc = notifyErrorForPendingRequests();
6637      if (rc < 0) {
6638          LOGE("notifyErrorForPendingRequests failed");
6639          pthread_mutex_unlock(&mMutex);
6640          return rc;
6641      }
6642  
6643      //unblock process_capture_request
6644      mPendingLiveRequest = 0;
6645      unblockRequestIfNecessary();
6646  
6647      mFlushPerf = false;
6648      pthread_mutex_unlock(&mMutex);
6649      LOGD ("Flush Operation complete. rc = %d", rc);
6650      return rc;
6651  }
6652  
6653  /*===========================================================================
6654   * FUNCTION   : handleCameraDeviceError
6655   *
6656   * DESCRIPTION: This function calls internal flush and notifies the error to
6657   *              framework and updates the state variable.
6658   *
6659   * PARAMETERS :
6660   *   @stopChannelImmediately : stop channels immediately without waiting for
6661   *                             frame boundary.
6662   *
6663   * RETURN     : NO_ERROR on Success
6664   *              Error code on failure
6665   *==========================================================================*/
handleCameraDeviceError(bool stopChannelImmediately)6666  int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
6667  {
6668      int32_t rc = NO_ERROR;
6669  
6670      {
6671          Mutex::Autolock lock(mFlushLock);
6672          pthread_mutex_lock(&mMutex);
6673          if (mState != ERROR) {
6674              //if mState != ERROR, nothing to be done
6675              pthread_mutex_unlock(&mMutex);
6676              return NO_ERROR;
6677          }
6678          pthread_mutex_unlock(&mMutex);
6679  
6680          rc = flush(false /* restart channels */, stopChannelImmediately);
6681          if (NO_ERROR != rc) {
6682              LOGE("internal flush to handle mState = ERROR failed");
6683          }
6684  
6685          pthread_mutex_lock(&mMutex);
6686          mState = DEINIT;
6687          pthread_mutex_unlock(&mMutex);
6688      }
6689  
6690      camera3_notify_msg_t notify_msg;
6691      memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6692      notify_msg.type = CAMERA3_MSG_ERROR;
6693      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6694      notify_msg.message.error.error_stream = NULL;
6695      notify_msg.message.error.frame_number = 0;
6696      orchestrateNotify(&notify_msg);
6697  
6698      return rc;
6699  }
6700  
6701  /*===========================================================================
6702   * FUNCTION   : captureResultCb
6703   *
6704   * DESCRIPTION: Callback handler for all capture result
6705   *              (streams, as well as metadata)
6706   *
6707   * PARAMETERS :
6708   *   @metadata : metadata information
6709   *   @buffer   : actual gralloc buffer to be returned to frameworks.
6710   *               NULL if metadata.
6711   *
6712   * RETURN     : NONE
6713   *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)6714  void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6715                  camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6716  {
6717      if (metadata_buf) {
6718          pthread_mutex_lock(&mMutex);
6719          uint8_t batchSize = mBatchSize;
6720          pthread_mutex_unlock(&mMutex);
6721          if (batchSize) {
6722              handleBatchMetadata(metadata_buf,
6723                      true /* free_and_bufdone_meta_buf */);
6724          } else { /* mBatchSize = 0 */
6725              hdrPlusPerfLock(metadata_buf);
6726              pthread_mutex_lock(&mMutex);
6727              handleMetadataWithLock(metadata_buf,
6728                      true /* free_and_bufdone_meta_buf */,
6729                      true /* last urgent frame of batch metadata */,
6730                      true /* last frame of batch metadata */,
6731                      NULL);
6732              pthread_mutex_unlock(&mMutex);
6733          }
6734      } else if (isInputBuffer) {
6735          pthread_mutex_lock(&mMutex);
6736          handleInputBufferWithLock(frame_number);
6737          pthread_mutex_unlock(&mMutex);
6738      } else {
6739          pthread_mutex_lock(&mMutex);
6740          handleBufferWithLock(buffer, frame_number);
6741          pthread_mutex_unlock(&mMutex);
6742      }
6743      return;
6744  }
6745  
/*===========================================================================
 * FUNCTION   : getReprocessibleOutputStreamId
 *
 * DESCRIPTION: Get source output stream id for the input reprocess stream
 *              based on size and format, which would be the largest
 *              output stream if an input stream exists.
 *
 * PARAMETERS :
 *   @id      : return the stream id if found
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)6760  int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6761  {
6762      /* check if any output or bidirectional stream with the same size and format
6763         and return that stream */
6764      if ((mInputStreamInfo.dim.width > 0) &&
6765              (mInputStreamInfo.dim.height > 0)) {
6766          for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6767                  it != mStreamInfo.end(); it++) {
6768  
6769              camera3_stream_t *stream = (*it)->stream;
6770              if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6771                      (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6772                      (stream->format == mInputStreamInfo.format)) {
6773                  // Usage flag for an input stream and the source output stream
6774                  // may be different.
6775                  LOGD("Found reprocessible output stream! %p", *it);
6776                  LOGD("input stream usage 0x%x, current stream usage 0x%x",
6777                           stream->usage, mInputStreamInfo.usage);
6778  
6779                  QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6780                  if (channel != NULL && channel->mStreams[0]) {
6781                      id = channel->mStreams[0]->getMyServerID();
6782                      return NO_ERROR;
6783                  }
6784              }
6785          }
6786      } else {
6787          LOGD("No input stream, so no reprocessible output stream");
6788      }
6789      return NAME_NOT_FOUND;
6790  }
6791  
/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : int type of status
 *              fwk_name  -- success
 *              non-zero failure code
 *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)6807  template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6808          size_t len, halType hal_name)
6809  {
6810  
6811      for (size_t i = 0; i < len; i++) {
6812          if (arr[i].hal_name == hal_name) {
6813              return arr[i].fwk_name;
6814          }
6815      }
6816  
6817      /* Not able to find matching framework type is not necessarily
6818       * an error case. This happens when mm-camera supports more attributes
6819       * than the frameworks do */
6820      LOGH("Cannot find matching framework type");
6821      return NAME_NOT_FOUND;
6822  }
6823  
/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the fwk parameter to map
 *
 * RETURN     : int32_t type of status
 *              hal_name  -- success
 *              non-zero failure code
 *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)6839  template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6840          size_t len, fwkType fwk_name)
6841  {
6842      for (size_t i = 0; i < len; i++) {
6843          if (arr[i].fwk_name == fwk_name) {
6844              return arr[i].hal_name;
6845          }
6846      }
6847  
6848      LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6849      return NAME_NOT_FOUND;
6850  }
6851  
6852  /*===========================================================================
6853   * FUNCTION   : lookupProp
6854   *
6855   * DESCRIPTION: lookup a value by its name
6856   *
6857   * PARAMETERS :
6858   *   @arr     : map between the two enums
6859   *   @len     : size of the map
6860   *   @name    : name to be looked up
6861   *
6862   * RETURN     : Value if found
6863   *              CAM_CDS_MODE_MAX if not found
6864   *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)6865  template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6866          size_t len, const char *name)
6867  {
6868      if (name) {
6869          for (size_t i = 0; i < len; i++) {
6870              if (!strcmp(arr[i].desc, name)) {
6871                  return arr[i].val;
6872              }
6873          }
6874      }
6875      return CAM_CDS_MODE_MAX;
6876  }
6877  
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received from the HAL backend into the
 *              camera_metadata_t format expected by the framework
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @pendingRequest: pending request for this metadata
 *   @pprocDone: whether internal offline postprocessing is done
 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                         in a batch. Always true for non-batch mode.
 *   @enableZsl: optional pointer; presumably the ZSL-enable state to report
 *               for this request — confirm against callers
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
6892  camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,const PendingRequestInfo & pendingRequest,bool pprocDone,bool lastMetadataInBatch,const bool * enableZsl)6893  QCamera3HardwareInterface::translateFromHalMetadata(
6894                                   metadata_buffer_t *metadata,
6895                                   const PendingRequestInfo& pendingRequest,
6896                                   bool pprocDone,
6897                                   bool lastMetadataInBatch,
6898                                   const bool *enableZsl)
6899  {
6900      CameraMetadata camMetadata;
6901      camera_metadata_t *resultMetadata;
6902  
6903      if (!lastMetadataInBatch) {
6904          /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6905           * Timestamp is needed because it's used for shutter notify calculation.
6906           * */
6907          camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6908          resultMetadata = camMetadata.release();
6909          return resultMetadata;
6910      }
6911  
6912      if (pendingRequest.jpegMetadata.entryCount())
6913          camMetadata.append(pendingRequest.jpegMetadata);
6914  
6915      camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6916      camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6917      camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6918      camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6919      camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
6920      camMetadata.update(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE, &pendingRequest.motion_detection_enable, 1);
6921      if (mBatchSize == 0) {
6922          // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6923          camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
6924      }
6925  
6926      // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6927      // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6928      if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
6929          // DevCamDebug metadata translateFromHalMetadata AF
6930          IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6931                  CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6932              int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6933              camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6934          }
6935          IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6936                  CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
6937              int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6938              camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6939          }
6940          IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6941                  CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
6942              int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6943              camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6944          }
6945          IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6946                  CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6947              int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6948              camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6949          }
6950          IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6951                  CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6952              int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6953              camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6954          }
6955          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6956                  CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6957              int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6958                  *DevCamDebug_af_monitor_pdaf_target_pos;
6959              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6960                  &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6961          }
6962          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6963                  CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6964              int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6965                  *DevCamDebug_af_monitor_pdaf_confidence;
6966              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6967                  &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6968          }
6969          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6970                  CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6971              int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6972              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6973                  &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6974          }
6975          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6976                  CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6977              int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6978                  *DevCamDebug_af_monitor_tof_target_pos;
6979              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6980                  &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6981          }
6982          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6983                  CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6984              int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6985                  *DevCamDebug_af_monitor_tof_confidence;
6986              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6987                  &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6988          }
6989          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6990                  CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6991              int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6992              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6993                  &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6994          }
6995          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6996                  CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6997              int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6998              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6999                  &fwk_DevCamDebug_af_monitor_type_select, 1);
7000          }
7001          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
7002                  CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
7003              int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
7004              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7005                  &fwk_DevCamDebug_af_monitor_refocus, 1);
7006          }
7007          IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
7008                  CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
7009              int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
7010              camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7011                  &fwk_DevCamDebug_af_monitor_target_pos, 1);
7012          }
7013          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
7014                  CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
7015              int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
7016                  *DevCamDebug_af_search_pdaf_target_pos;
7017              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7018                  &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
7019          }
7020          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
7021                  CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
7022              int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
7023              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7024                  &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
7025          }
7026          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
7027                  CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
7028              int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
7029              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7030                  &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
7031          }
7032          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
7033                  CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
7034              int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
7035              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7036                  &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
7037          }
7038          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
7039                  CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
7040              int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
7041              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7042                  &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
7043          }
7044          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
7045                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
7046              int32_t fwk_DevCamDebug_af_search_tof_target_pos =
7047                  *DevCamDebug_af_search_tof_target_pos;
7048              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7049                  &fwk_DevCamDebug_af_search_tof_target_pos, 1);
7050          }
7051          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
7052                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
7053              int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
7054              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7055                  &fwk_DevCamDebug_af_search_tof_next_pos, 1);
7056          }
7057          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
7058                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
7059              int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
7060              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7061                  &fwk_DevCamDebug_af_search_tof_near_pos, 1);
7062          }
7063          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
7064                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
7065              int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
7066              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7067                  &fwk_DevCamDebug_af_search_tof_far_pos, 1);
7068          }
7069          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
7070                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
7071              int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
7072              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7073                  &fwk_DevCamDebug_af_search_tof_confidence, 1);
7074          }
7075          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
7076                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
7077              int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
7078              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7079                  &fwk_DevCamDebug_af_search_type_select, 1);
7080          }
7081          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
7082                  CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
7083              int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
7084              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7085                  &fwk_DevCamDebug_af_search_next_pos, 1);
7086          }
7087          IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
7088                  CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
7089              int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
7090              camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7091                  &fwk_DevCamDebug_af_search_target_pos, 1);
7092          }
7093          // DevCamDebug metadata translateFromHalMetadata AEC
7094          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
7095                  CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
7096              int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
7097              camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
7098      }
7099          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
7100                  CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
7101              int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
7102              camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
7103          }
7104          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7105                  CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7106              int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7107              camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7108          }
7109          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7110                  CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7111              int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7112              camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7113          }
7114          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7115                  CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7116              int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7117              camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7118          }
7119          IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7120                  CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7121              float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7122              camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7123          }
7124          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7125                  CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7126              int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7127              camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7128          }
7129          IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7130                  CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7131              float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7132              camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7133          }
7134          // DevCamDebug metadata translateFromHalMetadata zzHDR
7135          IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7136                  CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7137              float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7138              camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7139          }
7140          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7141                  CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
7142              int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
7143              camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7144          }
7145          IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7146                  CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7147              float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7148              camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7149          }
7150          IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7151                  CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
7152              int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
7153              camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7154          }
7155          IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7156                  CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7157              float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7158                  *DevCamDebug_aec_hdr_sensitivity_ratio;
7159              camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7160                                 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7161          }
7162          IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7163                  CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7164              float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7165              camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7166                                 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7167          }
7168          // DevCamDebug metadata translateFromHalMetadata ADRC
7169          IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7170                  CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7171              float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7172              camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7173                                 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7174          }
7175          IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7176                  CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7177              float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7178              camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7179                                 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7180          }
7181          IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7182                  CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7183              float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7184              camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7185          }
7186          IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7187                  CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7188              float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7189              camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7190          }
7191          IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7192                  CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7193              float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7194              camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7195          }
7196          IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7197                  CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7198              float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7199              camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7200          }
7201          // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7202          IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7203                  CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7204              float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7205              camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7206                                 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7207          }
7208          IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7209                  CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7210              float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7211              camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7212                                 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7213          }
7214          IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7215                  CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7216              float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7217              camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7218                                 &fwk_DevCamDebug_aec_subject_motion, 1);
7219          }
7220          // DevCamDebug metadata translateFromHalMetadata AWB
7221          IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7222                  CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7223              float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7224              camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7225          }
7226          IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7227                  CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7228              float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7229              camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7230          }
7231          IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7232                  CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7233              float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7234              camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7235          }
7236          IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7237                  CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7238              int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7239              camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7240          }
7241          IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7242                  CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7243              int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7244              camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7245          }
7246      }
7247      // atrace_end(ATRACE_TAG_ALWAYS);
7248  
    // HAL frame number (uint32) is widened to int64 for the framework sync tag.
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    // AE target FPS range: HAL reports floats, the framework tag is int32[2]
    // (min, max); fractional parts are truncated by the cast.
    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }

    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }

    // Map the HAL bestshot mode to the framework scene-mode enum; the tag is
    // skipped when no framework equivalent exists.
    // NOTE(review): the (uint8_t) cast on the lookup result looks wrong — if
    // lookupFwkName returns NAME_NOT_FOUND (negative), the cast folds it into
    // the 0..255 range so the NAME_NOT_FOUND guard below can never trip.
    // Sibling lookups (e.g. flash mode below) omit this cast; confirm and drop.
    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP),
                *sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkSceneMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
                     fwkSceneMode);
        }
    }

    // AE/AWB lock and color-correction mode: HAL uint32 values narrowed to the
    // framework's uint8 enum representation.
    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }
7294  
    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    // Flash state: negative HAL values are treated as invalid and dropped.
    // Devices without a flash unit always report UNAVAILABLE regardless of
    // what the HAL said.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    // Flash mode goes through the HAL->framework enum map; unmapped values
    // leave the tag unset.
    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    // Lens state pass-throughs (float values forwarded unchanged).
    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("EIS result default to OFF mode");
    }
7360  
    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    // Applied dynamic black level: reorder the HAL per-channel values into the
    // framework's CFA (RGGB) order, publish the vendor tag, then rescale for
    // the standard ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL tag (HAL 3.4+ only).
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

#ifndef USE_HAL_3_3
        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 14 bit depth to sensor 10 bit sensor raw
        // depth space.
        fwk_blackLevelInd[0] /= 16.0;
        fwk_blackLevelInd[1] /= 16.0;
        fwk_blackLevelInd[2] /= 16.0;
        fwk_blackLevelInd[3] /= 16.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
#endif
    }
7397  
#ifndef USE_HAL_3_3
    // Fixed whitelevel is used by ISP/Sensor
    // (static per-camera capability; published unconditionally every frame).
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);
#endif
7403  
    // Scaler crop region: converted from sensor-output coordinates to the
    // active-array coordinate system expected by the framework.
    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }

    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
        LOGD("expTimeBoost = %f", *expTimeBoost);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
    }

    // "sensorFameDuration" [sic] — identifier typo is preexisting; kept as-is.
    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    // Sensitivity plus the derived noise profile: one (S, O) model-coefficient
    // pair per color channel, both coefficients computed from the reported ISO.
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // Variable-length array (GCC/Clang extension); size = 2 * channels.
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }
7460  
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost = ISP sensitivity scaled by the post-stats
    // sensitivity factor; defaults to 100 (i.e. no boost) when the HAL did not
    // report an ISP sensitivity. Always published on HAL 3.4+.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
#endif
7471  
    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }
7476  
    // Face detection results. Only processed when the mode maps to a framework
    // enum and is not OFF. Face count is clamped to MAX_ROI; rectangles (and in
    // FULL mode, landmarks) are mapped from sensor-output to active-array
    // coordinates. When zero faces are detected, the arrays are zero-filled so
    // the tags are still published with well-defined contents.
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes faceRectangles (4 ints/face); k indexes
                    // faceLandmarks (TOTAL_LANDMARK_INDICES ints/face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height);

                        convertToRegions(rect, faceRectangles+j, -1);

                        LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
                                "bottom-right (%d, %d)",
                                faceDetectionInfo->frame_id, i,
                                faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
                                faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);

                        j+= 4;
                    }
                    // numFaces is unsigned, so this only triggers on exactly 0.
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    // Face IDs and landmarks are only published in FULL mode.
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.left_eye_center.x,
                                        face_landmarks.left_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.right_eye_center.x,
                                        face_landmarks.right_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.mouth_center.x,
                                        face_landmarks.mouth_center.y);

                                convertLandmarks(face_landmarks, faceLandmarks+k);

                                LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
                                        "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
                                        faceDetectionInfo->frame_id, i,
                                        faceLandmarks[k + LEFT_EYE_X],
                                        faceLandmarks[k + LEFT_EYE_Y],
                                        faceLandmarks[k + RIGHT_EYE_X],
                                        faceLandmarks[k + RIGHT_EYE_Y],
                                        faceLandmarks[k + MOUTH_X],
                                        faceLandmarks[k + MOUTH_Y]);

                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        } else {
                            // No landmark data from HAL: fill with sentinel
                            // "invalid" values so array sizes stay consistent.
                            for (size_t i = 0; i < numFaces; i++) {
                                setInvalidLandmarks(faceLandmarks+k);
                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        }

                        for (size_t i = 0; i < numFaces; i++) {
                            faceIds[i] = faceDetectionInfo->faces[i].face_id;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
                                    faceDetectionInfo->frame_id, i, faceIds[i]);
                        }

                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                    }
                    // Vendor per-face statistics below (blink/smile/gaze) are
                    // published independently of FULL mode, one entry per face.
                    IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
                            CAM_INTF_META_FACE_BLINK, metadata) {
                        uint8_t detected[MAX_ROI];
                        uint8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            detected[i] = blinks->blink[i].blink_detected;
                            degree[2 * i] = blinks->blink[i].left_blink;
                            degree[2 * i + 1] = blinks->blink[i].right_blink;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "blink_detected=%d, leye_blink=%d, reye_blink=%d",
                                    faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
                                    degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
                                detected, numFaces);
                        camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
                                degree, numFaces * 2);
                    }
                    IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
                            CAM_INTF_META_FACE_SMILE, metadata) {
                        uint8_t degree[MAX_ROI];
                        uint8_t confidence[MAX_ROI];
                        for (size_t i = 0; i < numFaces; i++) {
                            degree[i] = smiles->smile[i].smile_degree;
                            confidence[i] = smiles->smile[i].smile_confidence;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "smile_degree=%d, smile_score=%d",
                                    faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
                        }
                        camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
                                degree, numFaces);
                        camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
                                confidence, numFaces);
                    }
                    IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
                            CAM_INTF_META_FACE_GAZE, metadata) {
                        int8_t angle[MAX_ROI];
                        int32_t direction[MAX_ROI * 3];
                        int8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            angle[i] = gazes->gaze[i].gaze_angle;
                            direction[3 * i] = gazes->gaze[i].updown_dir;
                            direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
                            direction[3 * i + 2] = gazes->gaze[i].roll_dir;
                            degree[2 * i] = gazes->gaze[i].left_right_gaze;
                            degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
                                    "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
                                    "left_right_gaze=%d, top_bottom_gaze=%d",
                                    faceDetectionInfo->frame_id, i, angle[i],
                                    direction[3 * i], direction[3 * i + 1],
                                    direction[3 * i + 2],
                                    degree[2 * i], degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
                                (uint8_t *)angle, numFaces);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
                                direction, numFaces * 3);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
                                (uint8_t *)degree, numFaces * 2);
                    }
                }
            }
        }
    }
7643  
    // Histogram: publish mode (and bin count, when reported) to the vendor
    // tags, then — only when the mode is ON and bins are known — pick one
    // channel's histogram buffer to export.
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        int32_t histogramBins = 0;
        camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);

        IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
            histogramBins = *histBins;
            camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
        }

        if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
            IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
                // process histogram statistics info
                // NOTE(review): if stats_data->type is neither BAYER nor YUV,
                // histogramData stays NULL and is passed to update() as-is —
                // confirm whether that can occur / whether update() tolerates it.
                int32_t* histogramData = NULL;

                switch (stats_data->type) {
                case CAM_HISTOGRAM_TYPE_BAYER:
                    // Channel selection; R channel doubles as the fallback for
                    // Y/ALL and any unknown channel value.
                    switch (stats_data->bayer_stats.data_type) {
                        case CAM_STATS_CHANNEL_GR:
                          histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_GB:
                          histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_B:
                          histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_Y:
                        case CAM_STATS_CHANNEL_ALL:
                        case CAM_STATS_CHANNEL_R:
                        default:
                          histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
                          break;
                    }
                    break;
                case CAM_HISTOGRAM_TYPE_YUV:
                    histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
                    break;
                }

                camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
            }
        }
    }
7689  
    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Sharpness map is exported at the full fixed maximum size (3 values per
    // map cell).
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: dimensions come from the per-camera capability, clamped
    // to the compile-time maxima; 4 gain values (one per CFA channel) per cell.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }
7716  
    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        // Clamp the point count to the supported maximum. Note this mutates the
        // HAL metadata struct in place, not a local copy.
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        // Each curve entry holds 2 floats per point (count * 2 total values).
        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }
7739  
    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAIN_MAX);
    }

    // Color-correction matrix is forwarded as rationals; the cast goes through
    // void* because the HAL stores the matrix in its own struct layout.
    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    // Profile tone curve: same clamp-then-publish pattern as the tonemap
    // curves above (also mutates the HAL struct when clamping).
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }
7778  
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Effect mode goes through the HAL->framework enum map; unmapped values
    // leave the tag unset.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }
7801  
    // Sensor test pattern: mode goes through the enum map; the 4-element data
    // array is always R, Gr, Gb, B slots, with the two green channels swapped
    // depending on the sensor's CFA arrangement.
    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
        if (NAME_NOT_FOUND != fwk_testPatternMode) {
            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
        }
        // NOTE(review): for an unsupported color arrangement, elements [1] and
        // [2] are left uninitialized but still published below — confirm.
        int32_t fwk_testPatternData[4];
        fwk_testPatternData[0] = testPatternData->r;
        fwk_testPatternData[3] = testPatternData->b;
        switch (gCamCapability[mCameraId]->color_arrangement) {
        case CAM_FILTER_ARRANGEMENT_RGGB:
        case CAM_FILTER_ARRANGEMENT_GRBG:
            fwk_testPatternData[1] = testPatternData->gr;
            fwk_testPatternData[2] = testPatternData->gb;
            break;
        case CAM_FILTER_ARRANGEMENT_GBRG:
        case CAM_FILTER_ARRANGEMENT_BGGR:
            fwk_testPatternData[2] = testPatternData->gr;
            fwk_testPatternData[1] = testPatternData->gb;
            break;
        default:
            LOGE("color arrangement %d is not supported",
                gCamCapability[mCameraId]->color_arrangement);
            break;
        }
        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
    }
7830  
    // JPEG/EXIF related pass-throughs: GPS info, orientation, qualities, and
    // thumbnail size.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    // GPS processing method arrives as raw bytes; assumed NUL-terminated here
    // (String8 construction from const char*).
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }
7864  
7865      // Skip reprocess metadata if there is no input stream.
7866      if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7867          IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7868              camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7869                      privateData,
7870                      MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7871          }
7872      }
7873  
7874      IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
7875          camMetadata.update(QCAMERA3_EXPOSURE_METER,
7876                  meteringMode, 1);
7877      }
7878  
7879      IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7880              CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7881          LOGD("hdr_scene_data: %d %f\n",
7882                  hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7883          uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7884          float isHdrConfidence = hdr_scene_data->hdr_confidence;
7885          camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7886                             &isHdr, 1);
7887          camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7888                             &isHdrConfidence, 1);
7889      }
7890  
7891  
7892  
    // Serialize the HAL tuning parameters into a flat byte blob and publish
    // it through the QCAMERA3_TUNING_META_DATA_BLOB vendor tag. Blob layout,
    // in order:
    //   uint32 tuning_data_version
    //   uint32 sensor section size
    //   uint32 VFE section size
    //   uint32 CPP section size
    //   uint32 CAC section size
    //   uint32 mod3 section size (forced to 0 below)
    // followed by the sensor, VFE, CPP and CAC data sections, each clamped
    // to its respective *_MAX capacity.
    if (metadata->is_tuning_params_valid) {
        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;


        // Fixed-size header: version word first.
        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
                sizeof(uint32_t));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
                sizeof(uint32_t));
        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
                sizeof(uint32_t));
        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // mod3 data is intentionally not forwarded; its size word is zeroed.
        metadata->tuning_params.tuning_mod3_data_size = 0;
        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
                sizeof(uint32_t));
        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // Variable-length sections. Each copy is clamped to the section's
        // maximum so a bogus size from the backend cannot overrun the blob.
        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
                TUNING_SENSOR_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
                TUNING_VFE_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
                TUNING_CPP_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cac_data_size,
                TUNING_CAC_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
                count);
        data += count;

        // NOTE(review): the blob length is expressed as a count of uint32
        // elements; if the section sizes are not all multiples of 4 the
        // integer division drops the trailing bytes — TODO confirm consumers
        // only expect word-aligned payloads.
        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
                (int32_t *)(void *)tuning_meta_data_blob,
                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
    }
7957  
7958      IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7959              CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7960          camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7961                  (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7962                  NEUTRAL_COL_POINTS);
7963      }
7964  
7965      IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7966          uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7967          camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7968      }
7969  
7970      IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7971          int32_t aeRegions[REGIONS_TUPLE_COUNT];
7972          // Adjust crop region from sensor output coordinate system to active
7973          // array coordinate system.
7974          cam_rect_t hAeRect = hAeRegions->rect;
7975          mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7976                  hAeRect.width, hAeRect.height);
7977  
7978          convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
7979          camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7980                  REGIONS_TUPLE_COUNT);
7981          LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7982                   aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7983                  hAeRect.left, hAeRect.top, hAeRect.width,
7984                  hAeRect.height);
7985      }
7986  
7987      if (!pendingRequest.focusStateSent) {
7988          if (pendingRequest.focusStateValid) {
7989              camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7990              LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
7991          } else {
7992              IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7993                  uint8_t fwk_afState = (uint8_t) *afState;
7994                  camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7995                  LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7996              }
7997          }
7998      }
7999  
    // Publish the lens focus distance. The most recent valid value is cached
    // in mLastFocusDistance so a frame whose metadata lacks the entry still
    // reports a (stale but plausible) distance instead of dropping the tag.
    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
        mLastFocusDistance = *focusDistance;
    } else {
        // Fall back to the last known value when the HAL omits the entry.
        LOGE("Missing LENS_FOCUS_DISTANCE metadata. Use last known distance of %f",
                mLastFocusDistance);
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , &mLastFocusDistance, 1);
    }
8008  
8009      IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
8010          camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
8011      }
8012  
8013      IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
8014          uint8_t fwk_lensState = *lensState;
8015          camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
8016      }
8017  
8018      IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
8019          uint32_t ab_mode = *hal_ab_mode;
8020          if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
8021                  ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
8022                ab_mode = CAM_ANTIBANDING_MODE_AUTO;
8023          }
8024          int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8025                  ab_mode);
8026          if (NAME_NOT_FOUND != val) {
8027              uint8_t fwk_ab_mode = (uint8_t)val;
8028              camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
8029          }
8030      }
8031  
8032      IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
8033          int val = lookupFwkName(SCENE_MODES_MAP,
8034                  METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
8035          if (NAME_NOT_FOUND != val) {
8036              uint8_t fwkBestshotMode = (uint8_t)val;
8037              camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
8038              LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
8039          } else {
8040              LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
8041          }
8042      }
8043  
8044      IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
8045           uint8_t fwk_mode = (uint8_t) *mode;
8046           camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
8047      }
8048  
    /* Constant metadata values to be updated */

    // Hot pixel map reporting is not supported: advertise the mode as OFF
    // and publish an empty map.
    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    // The array contents are never read: the update below passes a count of
    // 0, producing an intentionally empty ANDROID_STATISTICS_HOT_PIXEL_MAP.
    int32_t hotPixelMap[2];
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
8056  
8057      // CDS
8058      IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
8059          camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
8060      }
8061  
8062      IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
8063          int32_t fwk_hdr;
8064          int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
8065          if(*vhdr == CAM_SENSOR_HDR_OFF) {
8066              fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
8067          } else {
8068              fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
8069          }
8070  
8071          if(fwk_hdr != curr_hdr_state) {
8072             LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
8073             if(fwk_hdr)
8074                mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
8075             else
8076                mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
8077          }
8078          camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
8079      }
8080  
8081      //binning correction
8082      IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
8083              CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
8084          int32_t fwk_bin_mode = (int32_t) *bin_correction;
8085          camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
8086      }
8087  
8088      IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
8089          int32_t fwk_ir = (int32_t) *ir;
8090          int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
8091          int8_t is_ir_on = 0;
8092  
8093          (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
8094          if(is_ir_on != curr_ir_state) {
8095             LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
8096             if(is_ir_on)
8097                mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
8098             else
8099                mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
8100          }
8101          camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
8102      }
8103  
8104      // AEC SPEED
8105      IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
8106          camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
8107      }
8108  
8109      // AWB SPEED
8110      IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
8111          camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
8112      }
8113  
8114      // TNR
8115      IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
8116          uint8_t tnr_enable       = tnr->denoise_enable;
8117          int32_t tnr_process_type = (int32_t)tnr->process_plates;
8118          int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
8119          int8_t is_tnr_on = 0;
8120  
8121          (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
8122          if(is_tnr_on != curr_tnr_state) {
8123             LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
8124             if(is_tnr_on)
8125                mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
8126             else
8127                mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
8128          }
8129  
8130          camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8131          camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8132      }
8133  
8134      // Reprocess crop data
8135      IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
8136          uint8_t cnt = crop_data->num_of_streams;
8137          if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8138              // mm-qcamera-daemon only posts crop_data for streams
8139              // not linked to pproc. So no valid crop metadata is not
8140              // necessarily an error case.
8141              LOGD("No valid crop metadata entries");
8142          } else {
8143              uint32_t reproc_stream_id;
8144              if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8145                  LOGD("No reprocessible stream found, ignore crop data");
8146              } else {
8147                  int rc = NO_ERROR;
8148                  Vector<int32_t> roi_map;
8149                  int32_t *crop = new int32_t[cnt*4];
8150                  if (NULL == crop) {
8151                     rc = NO_MEMORY;
8152                  }
8153                  if (NO_ERROR == rc) {
8154                      int32_t streams_found = 0;
8155                      for (size_t i = 0; i < cnt; i++) {
8156                          if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8157                              if (pprocDone) {
8158                                  // HAL already does internal reprocessing,
8159                                  // either via reprocessing before JPEG encoding,
8160                                  // or offline postprocessing for pproc bypass case.
8161                                  crop[0] = 0;
8162                                  crop[1] = 0;
8163                                  crop[2] = mInputStreamInfo.dim.width;
8164                                  crop[3] = mInputStreamInfo.dim.height;
8165                              } else {
8166                                  crop[0] = crop_data->crop_info[i].crop.left;
8167                                  crop[1] = crop_data->crop_info[i].crop.top;
8168                                  crop[2] = crop_data->crop_info[i].crop.width;
8169                                  crop[3] = crop_data->crop_info[i].crop.height;
8170                              }
8171                              roi_map.add(crop_data->crop_info[i].roi_map.left);
8172                              roi_map.add(crop_data->crop_info[i].roi_map.top);
8173                              roi_map.add(crop_data->crop_info[i].roi_map.width);
8174                              roi_map.add(crop_data->crop_info[i].roi_map.height);
8175                              streams_found++;
8176                              LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8177                                      crop[0], crop[1], crop[2], crop[3]);
8178                              LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8179                                      crop_data->crop_info[i].roi_map.left,
8180                                      crop_data->crop_info[i].roi_map.top,
8181                                      crop_data->crop_info[i].roi_map.width,
8182                                      crop_data->crop_info[i].roi_map.height);
8183                              break;
8184  
8185                         }
8186                      }
8187                      camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8188                              &streams_found, 1);
8189                      camMetadata.update(QCAMERA3_CROP_REPROCESS,
8190                              crop, (size_t)(streams_found * 4));
8191                      if (roi_map.array()) {
8192                          camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8193                                  roi_map.array(), roi_map.size());
8194                      }
8195                 }
8196                 if (crop) {
8197                     delete [] crop;
8198                 }
8199              }
8200          }
8201      }
8202  
    // Publish the chromatic aberration correction (CAC) result.
    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
        // Regardless of whether CAC is supported, CTS expects a non-NULL CAC
        // result, so hardcode the result to OFF mode.
        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
    } else {
        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    *cacMode);
            if (NAME_NOT_FOUND != val) {
                uint8_t resultCacMode = (uint8_t)val;
                // Check whether the CAC result from the callback matches the
                // framework-requested CAC mode; if not, report the mode that
                // came with the corresponding request so the result metadata
                // stays consistent with what the app asked for.
                if (pendingRequest.fwkCacMode != resultCacMode) {
                    resultCacMode = pendingRequest.fwkCacMode;
                }
                // CAC may be force-disabled via a system property; that
                // override wins over both the HAL result and the request.
                if (m_cacModeDisabled) {
                    resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
                }

                LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
            } else {
                LOGE("Invalid CAC camera parameter: %d", *cacMode);
            }
        }
    }
8231  
8232      // Post blob of cam_cds_data through vendor tag.
8233      IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8234          uint8_t cnt = cdsInfo->num_of_streams;
8235          cam_cds_data_t cdsDataOverride;
8236          memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8237          cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8238          cdsDataOverride.num_of_streams = 1;
8239          if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8240              uint32_t reproc_stream_id;
8241              if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8242                  LOGD("No reprocessible stream found, ignore cds data");
8243              } else {
8244                  for (size_t i = 0; i < cnt; i++) {
8245                      if (cdsInfo->cds_info[i].stream_id ==
8246                              reproc_stream_id) {
8247                          cdsDataOverride.cds_info[0].cds_enable =
8248                                  cdsInfo->cds_info[i].cds_enable;
8249                          break;
8250                      }
8251                  }
8252              }
8253          } else {
8254              LOGD("Invalid stream count %d in CDS_DATA", cnt);
8255          }
8256          camMetadata.update(QCAMERA3_CDS_INFO,
8257                  (uint8_t *)&cdsDataOverride,
8258                  sizeof(cam_cds_data_t));
8259      }
8260  
8261      // Ldaf calibration data
8262      if (!mLdafCalibExist) {
8263          IF_META_AVAILABLE(uint32_t, ldafCalib,
8264                  CAM_INTF_META_LDAF_EXIF, metadata) {
8265              mLdafCalibExist = true;
8266              mLdafCalib[0] = ldafCalib[0];
8267              mLdafCalib[1] = ldafCalib[1];
8268              LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8269                      ldafCalib[0], ldafCalib[1]);
8270          }
8271      }
8272  
8273      // EXIF debug data through vendor tag
8274      /*
8275       * Mobicat Mask can assume 3 values:
8276       * 1 refers to Mobicat data,
8277       * 2 refers to Stats Debug and Exif Debug Data
8278       * 3 refers to Mobicat and Stats Debug Data
8279       * We want to make sure that we are sending Exif debug data
8280       * only when Mobicat Mask is 2.
8281       */
8282      if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8283          camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8284                  (uint8_t *)(void *)mExifParams.debug_params,
8285                  sizeof(mm_jpeg_debug_exif_params_t));
8286      }
8287  
8288      // Reprocess and DDM debug data through vendor tag
8289      cam_reprocess_info_t repro_info;
8290      memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
8291      IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8292              CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
8293          memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
8294      }
8295      IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8296              CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
8297          memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
8298      }
8299      IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8300              CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
8301          memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
8302      }
8303      IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8304              CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
8305          memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
8306      }
8307      IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8308              CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
8309          memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
8310      }
8311      IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
8312          memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
8313      }
8314      IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8315              CAM_INTF_PARM_ROTATION, metadata) {
8316          memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
8317      }
8318      IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8319          memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8320      }
8321      IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8322          memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8323      }
8324      camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8325          (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
8326  
8327      // INSTANT AEC MODE
8328      IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8329              CAM_INTF_PARM_INSTANT_AEC, metadata) {
8330          camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8331      }
8332  
8333      // AF scene change
8334      IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8335          camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8336          camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
8337      }
8338  
8339      // Enable ZSL
8340      if (enableZsl != nullptr) {
8341          uint8_t value = *enableZsl ?
8342                  ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8343          camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8344      }
8345  
8346      camMetadata.update(ANDROID_STATISTICS_OIS_DATA_MODE, &pendingRequest.requestedOisDataMode, 1);
8347  
8348      // OIS Data
8349      IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8350          camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8351              &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8352          camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8353              frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8354          camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8355              frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8356          camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8357              frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8358  
8359          if (pendingRequest.requestedOisDataMode == ANDROID_STATISTICS_OIS_DATA_MODE_ON) {
8360              int64_t timeDiff = pendingRequest.timestamp -
8361                      frame_ois_data->frame_sof_timestamp_boottime;
8362  
8363              std::vector<int64_t> oisTimestamps;
8364  
8365              for (int32_t i = 0; i < frame_ois_data->num_ois_sample; i++) {
8366                  oisTimestamps.push_back(
8367                          frame_ois_data->ois_sample_timestamp_boottime[i] + timeDiff);
8368              }
8369  
8370              camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
8371                      oisTimestamps.data(), frame_ois_data->num_ois_sample);
8372              camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
8373                      frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8374              camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
8375                      frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8376          } else {
8377              // If OIS data mode is OFF, add NULL for OIS keys.
8378              camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
8379                      frame_ois_data->ois_sample_timestamp_boottime, 0);
8380              camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
8381                      frame_ois_data->ois_sample_shift_pixel_x, 0);
8382              camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
8383                      frame_ois_data->ois_sample_shift_pixel_y, 0);
8384          }
8385      }
8386  
8387      // DevCamDebug metadata translateFromHalMetadata AEC MOTION
8388      IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
8389              CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
8390          float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
8391          camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
8392                             &fwk_DevCamDebug_aec_camera_motion_dx, 1);
8393      }
8394      IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
8395              CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
8396          float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
8397          camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
8398                             &fwk_DevCamDebug_aec_camera_motion_dy, 1);
8399      }
8400      IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
8401              CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
8402          float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
8403          camMetadata.update(NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
8404                             &fwk_DevCamDebug_aec_subject_motion, 1);
8405      }
8406  
8407      // Camera lens calibration dynamic fields, for back camera. Same values as for static metadata.
8408      if (mCameraId == 0) {
8409          const camera_metadata_t *staticInfo = gStaticMetadata[mCameraId];
8410          camera_metadata_ro_entry_t rotation, translation, intrinsics, distortion, reference;
8411          int res;
8412          bool fail = false;
8413          res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_ROTATION,
8414                  &rotation);
8415          if (res != 0) {
8416              fail = true;
8417          }
8418          res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_TRANSLATION,
8419                  &translation);
8420          if (res != 0) {
8421              fail = true;
8422          }
8423          res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_INTRINSIC_CALIBRATION,
8424                  &intrinsics);
8425          if (res != 0) {
8426              fail = true;
8427          }
8428          res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_DISTORTION,
8429                  &distortion);
8430          if (res != 0) {
8431              fail = true;
8432          }
8433          res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_REFERENCE,
8434                  &reference);
8435          if (res != 0) {
8436              fail = true;
8437          }
8438  
8439          if (!fail) {
8440              camMetadata.update(ANDROID_LENS_POSE_ROTATION,
8441                      rotation.data.f, rotation.count);
8442              camMetadata.update(ANDROID_LENS_POSE_TRANSLATION,
8443                      translation.data.f, translation.count);
8444              camMetadata.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
8445                      intrinsics.data.f, intrinsics.count);
8446              camMetadata.update(ANDROID_LENS_DISTORTION,
8447                      distortion.data.f, distortion.count);
8448              camMetadata.update(ANDROID_LENS_POSE_REFERENCE,
8449                      reference.data.u8, reference.count);
8450          }
8451      }
8452  
8453      resultMetadata = camMetadata.release();
8454      return resultMetadata;
8455  }
8456  
8457  /*===========================================================================
8458   * FUNCTION   : saveExifParams
8459   *
 * DESCRIPTION: Copies the per-module (AE/AWB/AF/ASD/stats) EXIF debug
 *              parameters delivered in the metadata callback into
 *              mExifParams.debug_params, marking each copied section valid,
 *              for later use.
8461   *
8462   * PARAMETERS :
8463   *   @metadata : metadata information from callback
8464   *
8465   * RETURN     : none
8466   *
8467   *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // For each exif debug blob present in the callback metadata, copy it into
    // mExifParams.debug_params (when that buffer is allocated) and flag it
    // valid, so get3AExifParams() can later hand the cached data to the jpeg
    // encoder. Each IF_META_AVAILABLE block below handles one blob type.

    // AE (auto-exposure) debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto-scene-detection) debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // Stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // BE (bayer exposure) stats debug data
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer histogram debug data
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug data
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
8527  
8528  /*===========================================================================
8529   * FUNCTION   : get3AExifParams
8530   *
 * DESCRIPTION: Returns a copy of the cached 3A exif parameters populated by
 *              saveExifParams().
8532   *
8533   * PARAMETERS : none
8534   *
8535   *
8536   * RETURN     : mm_jpeg_exif_params_t
8537   *
8538   *==========================================================================*/
get3AExifParams()8539  mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8540  {
8541      return mExifParams;
8542  }
8543  
8544  /*===========================================================================
8545   * FUNCTION   : translateCbUrgentMetadataToResultMetadata
8546   *
 * DESCRIPTION: Translates urgent (partial-result) metadata received from the
 *              HAL callback into framework camera_metadata_t form.
8548   *
8549   * PARAMETERS :
8550   *   @metadata : metadata information from callback
8551   *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8552   *                               urgent metadata in a batch. Always true for
8553   *                               non-batch mode.
8554   *   @frame_number :             frame number for this urgent metadata
8555   *   @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8556   *                         i.e. even though it doesn't map to a valid partial
8557   *                         frame number, its metadata entries should be kept.
8558   * RETURN     : camera_metadata_t*
8559   *              metadata in a format specified by fwk
8560   *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
                                 uint32_t frame_number, bool isJumpstartMetadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id, forwarded as-is to the framework
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: HAL focus mode mapped to the framework enum via FOCUS_MODES_MAP
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and AF state. The trigger is latched into mAfTrigger so the
    // update() calls after this block always report the most recent trigger,
    // even on frames where the metadata carries no trigger.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
            af_trigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
            af_trigger->trigger_id);

        IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
            mAfTrigger = *af_trigger;
            uint32_t fwk_AfState = (uint32_t) *afState;

            // If this is the result for a new trigger, check if there is new early
            // af state. If there is, use the last af state for all results
            // preceding current partial frame number.
            for (auto & pendingRequest : mPendingRequestsList) {
                if (pendingRequest.frame_number < frame_number) {
                    pendingRequest.focusStateValid = true;
                    pendingRequest.focusState = fwk_AfState;
                } else if (pendingRequest.frame_number == frame_number) {
                    IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
                        // Check if early AF state for trigger exists. If yes, send AF state as
                        // partial result for better latency.
                        uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
                        pendingRequest.focusStateSent = true;
                        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
                        LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
                                 frame_number, fwkEarlyAfState);
                    }
                }
            }
        }
    }
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
        &mAfTrigger.trigger, 1);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);

    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
        /*af regions*/
        cam_rect_t hAfRect = hAfRegions->rect;
        int32_t afRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
                hAfRect.width, hAfRect.height);

        convertToRegions(hAfRect, afRegions, hAfRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
                hAfRect.left, hAfRect.top, hAfRect.width,
                hAfRect.height);
    }

    // AF region confidence
    IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
    }

    // AWB mode, mapped through WHITE_BALANCE_MODES_MAP
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three HAL values (aec mode, led/flash
    // mode, redeye reduction), checked in priority order below.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority: redeye reduction > auto/on flash > plain AE on/off/external.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant AEC: once AEC settles (or the frame bound is hit), leave instant
    // AEC mode and flag mResetInstantAEC so the next request can clear it.
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }

    // Time-of-flight scene distance: cache only when confidence == 1,
    // otherwise reset mSceneDistance to the -1 sentinel.
    IF_META_AVAILABLE(int32_t, af_tof_confidence,
            CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
        IF_META_AVAILABLE(int32_t, af_tof_distance,
                CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
            int32_t fwk_af_tof_confidence = *af_tof_confidence;
            int32_t fwk_af_tof_distance = *af_tof_distance;
            if (fwk_af_tof_confidence == 1) {
                mSceneDistance = fwk_af_tof_distance;
            } else {
                mSceneDistance = -1;
            }
            LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
                     fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
        }
    }
    camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);

    // Transfer ownership of the packed buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
8759  
8760  /*===========================================================================
8761   * FUNCTION   : dumpMetadataToFile
8762   *
8763   * DESCRIPTION: Dumps tuning metadata to file system
8764   *
8765   * PARAMETERS :
8766   *   @meta           : tuning metadata
8767   *   @dumpFrameCount : current dump frame count
8768   *   @enabled        : Enable mask
8769   *
8770   *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    //Some sanity checks
    // Bail out (no dump) if any section size exceeds its fixed maximum,
    // since the payload copies below index meta.data at fixed offsets.
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        LOGE("Tuning sensor data size bigger than expected %d: %d",
              meta.tuning_sensor_data_size,
              TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        LOGE("Tuning VFE data size bigger than expected %d: %d",
              meta.tuning_vfe_data_size,
              TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        LOGE("Tuning CPP data size bigger than expected %d: %d",
              meta.tuning_cpp_data_size,
              TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        LOGE("Tuning CAC data size bigger than expected %d: %d",
              meta.tuning_cac_data_size,
              TUNING_CAC_DATA_MAX);
        return;
    }
    //

    if(enabled){
        // File name: <dump-dir><timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin
        char timeBuf[FILENAME_MAX];
        char buf[FILENAME_MAX];
        memset(buf, 0, sizeof(buf));
        memset(timeBuf, 0, sizeof(timeBuf));
        time_t current_time;
        struct tm * timeinfo;
        time (&current_time);
        timeinfo = localtime (&current_time);
        if (timeinfo != NULL) {
            strftime (timeBuf, sizeof(timeBuf),
                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
        }
        String8 filePath(timeBuf);
        snprintf(buf,
                sizeof(buf),
                "%dm_%s_%d.bin",
                dumpFrameCount,
                type,
                frameNumber);
        filePath.append(buf);
        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
        if (file_fd >= 0) {
            // Header: six uint32 fields (version, sensor/vfe/cpp/cac sizes,
            // mod3 size forced to 0), then the four payload sections copied
            // from their fixed offsets inside meta.data.
            // NOTE(review): write() results are summed into written_len but
            // never checked; a short/failed write is silently ignored.
            ssize_t written_len = 0;
            meta.tuning_data_version = TUNING_DATA_VERSION;
            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            meta.tuning_mod3_data_size = 0;
            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            size_t total_size = meta.tuning_sensor_data_size;
            data = (void *)((uint8_t *)&meta.data);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_vfe_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cpp_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cac_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            close(file_fd);
        }else {
            LOGE("fail to open file for metadata dumping");
        }
    }
}
8868  
8869  /*===========================================================================
8870   * FUNCTION   : cleanAndSortStreamInfo
8871   *
8872   * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8873   *              and sort them such that raw stream is at the end of the list
8874   *              This is a workaround for camera daemon constraint.
8875   *
8876   * PARAMETERS : None
8877   *
8878   *==========================================================================*/
cleanAndSortStreamInfo()8879  void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8880  {
8881      List<stream_info_t *> newStreamInfo;
8882  
8883      /*clean up invalid streams*/
8884      for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8885              it != mStreamInfo.end();) {
8886          if(((*it)->status) == INVALID){
8887              QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8888              delete channel;
8889              free(*it);
8890              it = mStreamInfo.erase(it);
8891          } else {
8892              it++;
8893          }
8894      }
8895  
8896      // Move preview/video/callback/snapshot streams into newList
8897      for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8898              it != mStreamInfo.end();) {
8899          if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8900                  (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8901                  (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8902              newStreamInfo.push_back(*it);
8903              it = mStreamInfo.erase(it);
8904          } else
8905              it++;
8906      }
8907      // Move raw streams into newList
8908      for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8909              it != mStreamInfo.end();) {
8910          newStreamInfo.push_back(*it);
8911          it = mStreamInfo.erase(it);
8912      }
8913  
8914      mStreamInfo = newStreamInfo;
8915  
8916      // Make sure that stream IDs are unique.
8917      uint32_t id = 0;
8918      for (auto streamInfo : mStreamInfo) {
8919          streamInfo->id = id++;
8920      }
8921  
8922  }
8923  
8924  /*===========================================================================
8925   * FUNCTION   : extractJpegMetadata
8926   *
8927   * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8928   *              JPEG metadata is cached in HAL, and return as part of capture
8929   *              result when metadata is returned from camera daemon.
8930   *
8931   * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8932   *              @request:      capture request
8933   *
8934   *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)8935  void QCamera3HardwareInterface::extractJpegMetadata(
8936          CameraMetadata& jpegMetadata,
8937          const camera3_capture_request_t *request)
8938  {
8939      CameraMetadata frame_settings;
8940      frame_settings = request->settings;
8941  
8942      if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8943          jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8944                  frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8945                  frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8946  
8947      if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8948          jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8949                  frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8950                  frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8951  
8952      if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8953          jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8954                  frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8955                  frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8956  
8957      if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8958          jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8959                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8960                  frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8961  
8962      if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8963          jpegMetadata.update(ANDROID_JPEG_QUALITY,
8964                  frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8965                  frame_settings.find(ANDROID_JPEG_QUALITY).count);
8966  
8967      if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8968          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8969                  frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8970                  frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8971  
8972      if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8973          int32_t thumbnail_size[2];
8974          thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8975          thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8976          if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8977              int32_t orientation =
8978                    frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8979              if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8980                 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8981                 int32_t temp;
8982                 temp = thumbnail_size[0];
8983                 thumbnail_size[0] = thumbnail_size[1];
8984                 thumbnail_size[1] = temp;
8985              }
8986           }
8987           jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8988                  thumbnail_size,
8989                  frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8990      }
8991  
8992  }
8993  
8994  /*===========================================================================
8995   * FUNCTION   : convertToRegions
8996   *
8997   * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8998   *
8999   * PARAMETERS :
9000   *   @rect   : cam_rect_t struct to convert
9001   *   @region : int32_t destination array
9002   *   @weight : if we are converting from cam_area_t, weight is valid
9003   *             else weight = -1
9004   *
9005   *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)9006  void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
9007          int32_t *region, int weight)
9008  {
9009      region[FACE_LEFT] = rect.left;
9010      region[FACE_TOP] = rect.top;
9011      region[FACE_RIGHT] = rect.left + rect.width;
9012      region[FACE_BOTTOM] = rect.top + rect.height;
9013      if (weight > -1) {
9014          region[FACE_WEIGHT] = weight;
9015      }
9016  }
9017  
9018  /*===========================================================================
9019   * FUNCTION   : convertFromRegions
9020   *
 * DESCRIPTION: helper method to convert a framework region tag, laid out as
 *              [x_min, y_min, x_max, y_max, weight], into a cam_area_t
 *
 * PARAMETERS :
 *   @roi            : cam_area_t destination struct
 *   @frame_settings : capture request settings containing the region tag
 *   @tag            : metadata tag of the region entry to convert
9028   *
9029   *==========================================================================*/
convertFromRegions(cam_area_t & roi,const CameraMetadata & frame_settings,uint32_t tag)9030  void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
9031          const CameraMetadata &frame_settings, uint32_t tag)
9032  {
9033      int32_t x_min = frame_settings.find(tag).data.i32[0];
9034      int32_t y_min = frame_settings.find(tag).data.i32[1];
9035      int32_t x_max = frame_settings.find(tag).data.i32[2];
9036      int32_t y_max = frame_settings.find(tag).data.i32[3];
9037      roi.weight = frame_settings.find(tag).data.i32[4];
9038      roi.rect.left = x_min;
9039      roi.rect.top = y_min;
9040      roi.rect.width = x_max - x_min;
9041      roi.rect.height = y_max - y_min;
9042  }
9043  
9044  /*===========================================================================
9045   * FUNCTION   : resetIfNeededROI
9046   *
9047   * DESCRIPTION: helper method to reset the roi if it is greater than scaler
9048   *              crop region
9049   *
9050   * PARAMETERS :
9051   *   @roi       : cam_area_t struct to resize
9052   *   @scalerCropRegion : cam_crop_region_t region to compare against
9053   *
9054   *
9055   *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)9056  bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
9057                                                   const cam_crop_region_t* scalerCropRegion)
9058  {
9059      int32_t roi_x_max = roi->rect.width + roi->rect.left;
9060      int32_t roi_y_max = roi->rect.height + roi->rect.top;
9061      int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
9062      int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
9063  
9064      /* According to spec weight = 0 is used to indicate roi needs to be disabled
9065       * without having this check the calculations below to validate if the roi
9066       * is inside scalar crop region will fail resulting in the roi not being
9067       * reset causing algorithm to continue to use stale roi window
9068       */
9069      if (roi->weight == 0) {
9070          return true;
9071      }
9072  
9073      if ((roi_x_max < scalerCropRegion->left) ||
9074          // right edge of roi window is left of scalar crop's left edge
9075          (roi_y_max < scalerCropRegion->top)  ||
9076          // bottom edge of roi window is above scalar crop's top edge
9077          (roi->rect.left > crop_x_max) ||
9078          // left edge of roi window is beyond(right) of scalar crop's right edge
9079          (roi->rect.top > crop_y_max)){
9080          // top edge of roi windo is above scalar crop's top edge
9081          return false;
9082      }
9083      if (roi->rect.left < scalerCropRegion->left) {
9084          roi->rect.left = scalerCropRegion->left;
9085      }
9086      if (roi->rect.top < scalerCropRegion->top) {
9087          roi->rect.top = scalerCropRegion->top;
9088      }
9089      if (roi_x_max > crop_x_max) {
9090          roi_x_max = crop_x_max;
9091      }
9092      if (roi_y_max > crop_y_max) {
9093          roi_y_max = crop_y_max;
9094      }
9095      roi->rect.width = roi_x_max - roi->rect.left;
9096      roi->rect.height = roi_y_max - roi->rect.top;
9097      return true;
9098  }
9099  
9100  /*===========================================================================
9101   * FUNCTION   : convertLandmarks
9102   *
9103   * DESCRIPTION: helper method to extract the landmarks from face detection info
9104   *
9105   * PARAMETERS :
9106   *   @landmark_data : input landmark data to be converted
9107   *   @landmarks : int32_t destination array
9108   *
9109   *
9110   *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)9111  void QCamera3HardwareInterface::convertLandmarks(
9112          cam_face_landmarks_info_t landmark_data,
9113          int32_t *landmarks)
9114  {
9115      if (landmark_data.is_left_eye_valid) {
9116          landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
9117          landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
9118      } else {
9119          landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9120          landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9121      }
9122  
9123      if (landmark_data.is_right_eye_valid) {
9124          landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
9125          landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
9126      } else {
9127          landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9128          landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9129      }
9130  
9131      if (landmark_data.is_mouth_valid) {
9132          landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
9133          landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
9134      } else {
9135          landmarks[MOUTH_X] = FACE_INVALID_POINT;
9136          landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9137      }
9138  }
9139  
9140  /*===========================================================================
9141   * FUNCTION   : setInvalidLandmarks
9142   *
9143   * DESCRIPTION: helper method to set invalid landmarks
9144   *
9145   * PARAMETERS :
9146   *   @landmarks : int32_t destination array
9147   *
9148   *
9149   *==========================================================================*/
setInvalidLandmarks(int32_t * landmarks)9150  void QCamera3HardwareInterface::setInvalidLandmarks(
9151          int32_t *landmarks)
9152  {
9153      landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9154      landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9155      landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9156      landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9157      landmarks[MOUTH_X] = FACE_INVALID_POINT;
9158      landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9159  }
9160  
9161  #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
9162  
9163  /*===========================================================================
9164   * FUNCTION   : getCapabilities
9165   *
9166   * DESCRIPTION: query camera capability from back-end
9167   *
9168   * PARAMETERS :
9169   *   @ops  : mm-interface ops structure
9170   *   @cam_handle  : camera handle for which we need capability
9171   *
9172   * RETURN     : ptr type of capability structure
9173   *              capability for success
9174   *              NULL for failure
9175   *==========================================================================*/
getCapabilities(mm_camera_ops_t * ops,uint32_t cam_handle)9176  cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
9177          uint32_t cam_handle)
9178  {
9179      int rc = NO_ERROR;
9180      QCamera3HeapMemory *capabilityHeap = NULL;
9181      cam_capability_t *cap_ptr = NULL;
9182  
9183      if (ops == NULL) {
9184          LOGE("Invalid arguments");
9185          return NULL;
9186      }
9187  
9188      capabilityHeap = new QCamera3HeapMemory(1);
9189      if (capabilityHeap == NULL) {
9190          LOGE("creation of capabilityHeap failed");
9191          return NULL;
9192      }
9193  
9194      /* Allocate memory for capability buffer */
9195      rc = capabilityHeap->allocate(sizeof(cam_capability_t));
9196      if(rc != OK) {
9197          LOGE("No memory for cappability");
9198          goto allocate_failed;
9199      }
9200  
9201      /* Map memory for capability buffer */
9202      memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
9203  
9204      rc = ops->map_buf(cam_handle,
9205              CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
9206              sizeof(cam_capability_t), capabilityHeap->getPtr(0));
9207      if(rc < 0) {
9208          LOGE("failed to map capability buffer");
9209          rc = FAILED_TRANSACTION;
9210          goto map_failed;
9211      }
9212  
9213      /* Query Capability */
9214      rc = ops->query_capability(cam_handle);
9215      if(rc < 0) {
9216          LOGE("failed to query capability");
9217          rc = FAILED_TRANSACTION;
9218          goto query_failed;
9219      }
9220  
9221      cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
9222      if (cap_ptr == NULL) {
9223          LOGE("out of memory");
9224          rc = NO_MEMORY;
9225          goto query_failed;
9226      }
9227  
9228      memset(cap_ptr, 0, sizeof(cam_capability_t));
9229      memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
9230  
9231      int index;
9232      for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
9233          cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
9234          p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
9235          p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
9236      }
9237  
9238  query_failed:
9239      ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9240  map_failed:
9241      capabilityHeap->deallocate();
9242  allocate_failed:
9243      delete capabilityHeap;
9244  
9245      if (rc != NO_ERROR) {
9246          return NULL;
9247      } else {
9248          return cap_ptr;
9249      }
9250  }
9251  
9252  /*===========================================================================
9253   * FUNCTION   : initCapabilities
9254   *
9255   * DESCRIPTION: initialize camera capabilities in static data struct
9256   *
9257   * PARAMETERS :
9258   *   @cameraId  : camera Id
9259   *
9260   * RETURN     : int32_t type of status
9261   *              NO_ERROR  -- success
9262   *              none-zero failure code
9263   *==========================================================================*/
initCapabilities(uint32_t cameraId)9264  int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9265  {
9266      int rc = 0;
9267      mm_camera_vtbl_t *cameraHandle = NULL;
9268      uint32_t handle = 0;
9269  
9270      rc = camera_open((uint8_t)cameraId, &cameraHandle);
9271      if (rc) {
9272          LOGE("camera_open failed. rc = %d", rc);
9273          goto open_failed;
9274      }
9275      if (!cameraHandle) {
9276          LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9277          goto open_failed;
9278      }
9279  
9280      handle = get_main_camera_handle(cameraHandle->camera_handle);
9281      gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9282      if (gCamCapability[cameraId] == NULL) {
9283          rc = FAILED_TRANSACTION;
9284          goto failed_op;
9285      }
9286  
9287      gCamCapability[cameraId]->camera_index = cameraId;
9288      if (is_dual_camera_by_idx(cameraId)) {
9289          handle = get_aux_camera_handle(cameraHandle->camera_handle);
9290          gCamCapability[cameraId]->aux_cam_cap =
9291                  getCapabilities(cameraHandle->ops, handle);
9292          if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9293              rc = FAILED_TRANSACTION;
9294              free(gCamCapability[cameraId]);
9295              goto failed_op;
9296          }
9297  
9298          // Copy the main camera capability to main_cam_cap struct
9299          gCamCapability[cameraId]->main_cam_cap =
9300                          (cam_capability_t *)malloc(sizeof(cam_capability_t));
9301          if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9302              LOGE("out of memory");
9303              rc = NO_MEMORY;
9304              goto failed_op;
9305          }
9306          memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9307                  sizeof(cam_capability_t));
9308      }
9309  failed_op:
9310      cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9311      cameraHandle = NULL;
9312  open_failed:
9313      return rc;
9314  }
9315  
9316  /*==========================================================================
9317   * FUNCTION   : get3Aversion
9318   *
9319   * DESCRIPTION: get the Q3A S/W version
9320   *
9321   * PARAMETERS :
9322   *  @sw_version: Reference of Q3A structure which will hold version info upon
9323   *               return
9324   *
9325   * RETURN     : None
9326   *
9327   *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)9328  void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9329  {
9330      if(gCamCapability[mCameraId])
9331          sw_version = gCamCapability[mCameraId]->q3a_version;
9332      else
9333          LOGE("Capability structure NULL!");
9334  }
9335  
9336  
9337  /*===========================================================================
9338   * FUNCTION   : initParameters
9339   *
9340   * DESCRIPTION: initialize camera parameters
9341   *
9342   * PARAMETERS :
9343   *
9344   * RETURN     : int32_t type of status
9345   *              NO_ERROR  -- success
9346   *              none-zero failure code
9347   *==========================================================================*/
initParameters()9348  int QCamera3HardwareInterface::initParameters()
9349  {
9350      int rc = 0;
9351  
9352      //Allocate Set Param Buffer
9353      mParamHeap = new QCamera3HeapMemory(1);
9354      rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9355      if(rc != OK) {
9356          rc = NO_MEMORY;
9357          LOGE("Failed to allocate SETPARM Heap memory");
9358          delete mParamHeap;
9359          mParamHeap = NULL;
9360          return rc;
9361      }
9362  
9363      //Map memory for parameters buffer
9364      rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9365              CAM_MAPPING_BUF_TYPE_PARM_BUF,
9366              mParamHeap->getFd(0),
9367              sizeof(metadata_buffer_t),
9368              (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9369      if(rc < 0) {
9370          LOGE("failed to map SETPARM buffer");
9371          rc = FAILED_TRANSACTION;
9372          mParamHeap->deallocate();
9373          delete mParamHeap;
9374          mParamHeap = NULL;
9375          return rc;
9376      }
9377  
9378      mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9379  
9380      mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9381      return rc;
9382  }
9383  
9384  /*===========================================================================
9385   * FUNCTION   : deinitParameters
9386   *
9387   * DESCRIPTION: de-initialize camera parameters
9388   *
9389   * PARAMETERS :
9390   *
9391   * RETURN     : NONE
9392   *==========================================================================*/
deinitParameters()9393  void QCamera3HardwareInterface::deinitParameters()
9394  {
9395      mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9396              CAM_MAPPING_BUF_TYPE_PARM_BUF);
9397  
9398      mParamHeap->deallocate();
9399      delete mParamHeap;
9400      mParamHeap = NULL;
9401  
9402      mParameters = NULL;
9403  
9404      free(mPrevParameters);
9405      mPrevParameters = NULL;
9406  }
9407  
9408  /*===========================================================================
9409   * FUNCTION   : calcMaxJpegSize
9410   *
9411   * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9412   *
9413   * PARAMETERS :
9414   *
9415   * RETURN     : max_jpeg_size
9416   *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)9417  size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9418  {
9419      size_t max_jpeg_size = 0;
9420      size_t temp_width, temp_height;
9421      size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9422              MAX_SIZES_CNT);
9423      for (size_t i = 0; i < count; i++) {
9424          temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9425          temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9426          if (temp_width * temp_height > max_jpeg_size ) {
9427              max_jpeg_size = temp_width * temp_height;
9428          }
9429      }
9430      max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9431      return max_jpeg_size;
9432  }
9433  
9434  /*===========================================================================
9435   * FUNCTION   : getMaxRawSize
9436   *
9437   * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9438   *
9439   * PARAMETERS :
9440   *
9441   * RETURN     : Largest supported Raw Dimension
9442   *==========================================================================*/
getMaxRawSize(uint32_t camera_id)9443  cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9444  {
9445      int max_width = 0;
9446      cam_dimension_t maxRawSize;
9447  
9448      memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9449      for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9450          if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9451              max_width = gCamCapability[camera_id]->raw_dim[i].width;
9452              maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9453          }
9454      }
9455      return maxRawSize;
9456  }
9457  
9458  
9459  /*===========================================================================
9460   * FUNCTION   : calcMaxJpegDim
9461   *
9462   * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9463   *
9464   * PARAMETERS :
9465   *
9466   * RETURN     : max_jpeg_dim
9467   *==========================================================================*/
calcMaxJpegDim()9468  cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9469  {
9470      cam_dimension_t max_jpeg_dim;
9471      cam_dimension_t curr_jpeg_dim;
9472      max_jpeg_dim.width = 0;
9473      max_jpeg_dim.height = 0;
9474      curr_jpeg_dim.width = 0;
9475      curr_jpeg_dim.height = 0;
9476      for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9477          curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9478          curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9479          if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9480              max_jpeg_dim.width * max_jpeg_dim.height ) {
9481              max_jpeg_dim.width = curr_jpeg_dim.width;
9482              max_jpeg_dim.height = curr_jpeg_dim.height;
9483          }
9484      }
9485      return max_jpeg_dim;
9486  }
9487  
9488  /*===========================================================================
9489   * FUNCTION   : addStreamConfig
9490   *
9491   * DESCRIPTION: adds the stream configuration to the array
9492   *
9493   * PARAMETERS :
9494   * @available_stream_configs : pointer to stream configuration array
9495   * @scalar_format            : scalar format
9496   * @dim                      : configuration dimension
9497   * @config_type              : input or output configuration type
9498   *
9499   * RETURN     : NONE
9500   *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)9501  void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9502          int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9503  {
9504      available_stream_configs.add(scalar_format);
9505      available_stream_configs.add(dim.width);
9506      available_stream_configs.add(dim.height);
9507      available_stream_configs.add(config_type);
9508  }
9509  
9510  /*===========================================================================
9511   * FUNCTION   : suppportBurstCapture
9512   *
9513   * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9514   *
9515   * PARAMETERS :
9516   *   @cameraId  : camera Id
9517   *
9518   * RETURN     : true if camera supports BURST_CAPTURE
9519   *              false otherwise
9520   *==========================================================================*/
supportBurstCapture(uint32_t cameraId)9521  bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9522  {
9523      const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9524      const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9525      const int32_t highResWidth = 3264;
9526      const int32_t highResHeight = 2448;
9527  
9528      if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9529          // Maximum resolution images cannot be captured at >= 10fps
9530          // -> not supporting BURST_CAPTURE
9531          return false;
9532      }
9533  
9534      if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9535          // Maximum resolution images can be captured at >= 20fps
9536          // --> supporting BURST_CAPTURE
9537          return true;
9538      }
9539  
9540      // Find the smallest highRes resolution, or largest resolution if there is none
9541      size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9542              MAX_SIZES_CNT);
9543      size_t highRes = 0;
9544      while ((highRes + 1 < totalCnt) &&
9545              (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9546              gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9547              highResWidth * highResHeight)) {
9548          highRes++;
9549      }
9550      if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9551          return true;
9552      } else {
9553          return false;
9554      }
9555  }
9556  
9557  /*===========================================================================
9558   * FUNCTION   : getPDStatIndex
9559   *
9560   * DESCRIPTION: Return the meta raw phase detection statistics index if present
9561   *
9562   * PARAMETERS :
9563   *   @caps    : camera capabilities
9564   *
9565   * RETURN     : int32_t type
9566   *              non-negative - on success
9567   *              -1 - on failure
9568   *==========================================================================*/
getPDStatIndex(cam_capability_t * caps)9569  int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9570      if (nullptr == caps) {
9571          return -1;
9572      }
9573  
9574      uint32_t metaRawCount = caps->meta_raw_channel_count;
9575      int32_t ret = -1;
9576      for (size_t i = 0; i < metaRawCount; i++) {
9577          if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9578              ret = i;
9579              break;
9580          }
9581      }
9582  
9583      return ret;
9584  }
9585  
9586  /*===========================================================================
9587   * FUNCTION   : initStaticMetadata
9588   *
9589   * DESCRIPTION: initialize the static metadata
9590   *
9591   * PARAMETERS :
9592   *   @cameraId  : camera Id
9593   *
9594   * RETURN     : int32_t type of status
9595   *              0  -- success
9596   *              non-zero failure code
9597   *==========================================================================*/
initStaticMetadata(uint32_t cameraId)9598  int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9599  {
9600      int rc = 0;
9601      CameraMetadata staticInfo;
9602      size_t count = 0;
9603      bool limitedDevice = false;
9604      char prop[PROPERTY_VALUE_MAX];
9605      bool supportBurst = false;
9606      Vector<int32_t> available_characteristics_keys;
9607  
9608      supportBurst = supportBurstCapture(cameraId);
9609  
9610      /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9611       * guaranteed or if min fps of max resolution is less than 20 fps, its
9612       * advertised as limited device*/
9613      limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9614              (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9615              (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9616              !supportBurst;
9617  
9618      uint8_t supportedHwLvl = limitedDevice ?
9619              ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9620  #ifndef USE_HAL_3_3
9621              // LEVEL_3 - This device will support level 3.
9622              ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9623  #else
9624              ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9625  #endif
9626  
9627      staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9628              &supportedHwLvl, 1);
9629  
9630      bool facingBack = false;
9631      if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9632              (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9633          facingBack = true;
9634      }
9635      /*HAL 3 only*/
9636      staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9637                      &gCamCapability[cameraId]->min_focus_distance, 1);
9638  
9639      staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9640                      &gCamCapability[cameraId]->hyper_focal_distance, 1);
9641  
9642      /*should be using focal lengths but sensor doesn't provide that info now*/
9643      staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9644                        &gCamCapability[cameraId]->focal_length,
9645                        1);
9646  
9647      staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9648              gCamCapability[cameraId]->apertures,
9649              MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9650  
9651      staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9652              gCamCapability[cameraId]->filter_densities,
9653              MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9654  
9655  
9656      uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9657      size_t mode_count =
9658          MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9659      for (size_t i = 0; i < mode_count; i++) {
9660        available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9661      }
9662      staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9663              available_opt_stab_modes, mode_count);
9664  
9665      int32_t lens_shading_map_size[] = {
9666              MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9667              MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9668      staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9669                        lens_shading_map_size,
9670                        sizeof(lens_shading_map_size)/sizeof(int32_t));
9671  
9672      // Lens calibration for MOTION_TRACKING, back camera only
9673      if (cameraId == 0) {
9674  
9675          float poseRotation[4] = {1.0f, 0.f, 0.f, 0.f}; // quaternion rotation
9676          float poseTranslation[3] = {0.0f, 0.f, 0.f}; // xyz translation, meters
9677          uint8_t poseReference = ANDROID_LENS_POSE_REFERENCE_GYROSCOPE;
9678          // TODO: b/70565622 - these should have better identity values as a fallback
9679          float cameraIntrinsics[5] = {100.f, 100.f, 0.f, 1000, 1000}; // fx,fy,sx,cx,cy
9680          float radialDistortion[5] = {0.f, 0.f, 0.f, 0.f, 0.f}; // identity
9681  
9682          bool success = readSensorCalibration(
9683                  gCamCapability[cameraId]->active_array_size.width,
9684                  poseRotation, poseTranslation, cameraIntrinsics, radialDistortion);
9685          if (!success) {
9686              ALOGE("Using identity lens calibration values");
9687          }
9688          staticInfo.update(ANDROID_LENS_POSE_ROTATION,
9689                  poseRotation, sizeof(poseRotation)/sizeof(float));
9690          staticInfo.update(ANDROID_LENS_POSE_TRANSLATION,
9691                  poseTranslation, sizeof(poseTranslation)/sizeof(float));
9692          staticInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
9693                  cameraIntrinsics, sizeof(cameraIntrinsics)/sizeof(float));
9694          staticInfo.update(ANDROID_LENS_DISTORTION,
9695                  radialDistortion, sizeof(radialDistortion)/sizeof(float));
9696          staticInfo.update(ANDROID_LENS_POSE_REFERENCE,
9697                  &poseReference, sizeof(poseReference));
9698      }
9699  
9700      staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9701              gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9702  
9703      staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9704              gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9705  
9706      staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9707              &gCamCapability[cameraId]->max_frame_duration, 1);
9708  
9709      camera_metadata_rational baseGainFactor = {
9710              gCamCapability[cameraId]->base_gain_factor.numerator,
9711              gCamCapability[cameraId]->base_gain_factor.denominator};
9712      staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9713                        &baseGainFactor, 1);
9714  
9715      staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9716                       (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9717  
9718      int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9719              gCamCapability[cameraId]->pixel_array_size.height};
9720      staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9721                        pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9722  
9723      int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9724              gCamCapability[cameraId]->active_array_size.top,
9725              gCamCapability[cameraId]->active_array_size.width,
9726              gCamCapability[cameraId]->active_array_size.height};
9727      staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9728              active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9729  
9730      staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9731              &gCamCapability[cameraId]->white_level, 1);
9732  
9733      int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9734      adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9735              gCamCapability[cameraId]->color_arrangement);
9736      staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9737              adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9738  
9739  #ifndef USE_HAL_3_3
9740      bool hasBlackRegions = false;
9741      if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9742          LOGW("black_region_count: %d is bounded to %d",
9743              gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9744          gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9745      }
9746      if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9747          int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9748          for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9749              opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9750          }
9751          staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9752                  opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9753          hasBlackRegions = true;
9754      }
9755  #endif
9756      staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9757              &gCamCapability[cameraId]->flash_charge_duration, 1);
9758  
9759      staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9760              &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9761  
9762      uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9763              ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9764              ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9765      staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9766              &timestampSource, 1);
9767  
9768      //update histogram vendor data
9769      staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9770              &gCamCapability[cameraId]->histogram_size, 1);
9771  
9772      staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9773              &gCamCapability[cameraId]->max_histogram_count, 1);
9774  
9775      //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9776      //so that app can request fewer number of bins than the maximum supported.
9777      std::vector<int32_t> histBins;
9778      int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9779      histBins.push_back(maxHistBins);
9780      while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9781             (maxHistBins & 0x1) == 0) {
9782          histBins.push_back(maxHistBins >> 1);
9783          maxHistBins >>= 1;
9784      }
9785      staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9786              histBins.data(), histBins.size());
9787      if (!histBins.empty()) {
9788          available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9789      }
9790  
9791      int32_t sharpness_map_size[] = {
9792              gCamCapability[cameraId]->sharpness_map_size.width,
9793              gCamCapability[cameraId]->sharpness_map_size.height};
9794  
9795      staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9796              sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9797  
9798      staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9799              &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9800  
9801      int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9802      if (0 <= indexPD) {
9803          // Advertise PD stats data as part of the Depth capabilities
9804          int32_t depthWidth =
9805                  gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9806          int32_t depthHeight =
9807                  gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9808          int32_t depthStride =
9809                  gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
9810          int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9811          assert(0 < depthSamplesCount);
9812          staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9813                  &depthSamplesCount, 1);
9814  
9815          int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9816                  depthHeight,
9817                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9818                  HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9819                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9820          staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9821                  depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9822  
9823          int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9824                  depthHeight, 33333333,
9825                  HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9826          staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9827                  depthMinDuration,
9828                  sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9829  
9830          int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9831                  depthHeight, 0,
9832                  HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9833          staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9834                  depthStallDuration,
9835                  sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9836  
9837          uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9838          staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9839  
9840          int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9841          staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9842                  pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
9843          available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);
9844  
9845          staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9846                  reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9847                  sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9848          available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);
9849  
9850          staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9851                  reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9852                  sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9853          available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);
9854  
9855          staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9856                  reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9857                  sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
9858          available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
9859      }
9860  
9861  
9862      staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
9863          &(gCamCapability[cameraId]->wb_cal.num_lights), 1);
9864      available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS);
9865  
9866      const int32_t num_lights = gCamCapability[cameraId]->wb_cal.num_lights;
9867      staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
9868          gCamCapability[cameraId]->wb_cal.r_over_g, num_lights);
9869      available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS);
9870  
9871      staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
9872          gCamCapability[cameraId]->wb_cal.b_over_g, num_lights);
9873      available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS);
9874  
9875      staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
9876          &(gCamCapability[cameraId]->wb_cal.gr_over_gb), 1);
9877      available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO);
9878  
9879      int32_t scalar_formats[] = {
9880              ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9881              ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9882              ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9883              ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9884              HAL_PIXEL_FORMAT_RAW10,
9885              HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9886      size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9887      staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9888              scalar_formats_count);
9889  
9890      int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9891      count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9892      makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9893              count, MAX_SIZES_CNT, available_processed_sizes);
9894      staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9895              available_processed_sizes, count * 2);
9896  
9897      int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9898      count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9899      makeTable(gCamCapability[cameraId]->raw_dim,
9900              count, MAX_SIZES_CNT, available_raw_sizes);
9901      staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9902              available_raw_sizes, count * 2);
9903  
9904      int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9905      count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9906      makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9907              count, MAX_SIZES_CNT, available_fps_ranges);
9908      staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9909              available_fps_ranges, count * 2);
9910  
9911      camera_metadata_rational exposureCompensationStep = {
9912              gCamCapability[cameraId]->exp_compensation_step.numerator,
9913              gCamCapability[cameraId]->exp_compensation_step.denominator};
9914      staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9915                        &exposureCompensationStep, 1);
9916  
9917      Vector<uint8_t> availableVstabModes;
9918      availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9919      char eis_prop[PROPERTY_VALUE_MAX];
9920      bool eisSupported = false;
9921      memset(eis_prop, 0, sizeof(eis_prop));
9922      property_get("persist.camera.eis.enable", eis_prop, "1");
9923      uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9924      count = IS_TYPE_MAX;
9925      count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9926      for (size_t i = 0; i < count; i++) {
9927          if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9928              (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9929              eisSupported = true;
9930              break;
9931          }
9932      }
9933      if (facingBack && eis_prop_set && eisSupported) {
9934          availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9935      }
9936      staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9937                        availableVstabModes.array(), availableVstabModes.size());
9938  
9939      /*HAL 1 and HAL 3 common*/
9940      uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9941      uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9942      uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9943      // Cap the max zoom to the max preferred value
9944      float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9945      staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9946              &maxZoom, 1);
9947  
9948      uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9949      staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9950  
9951      int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9952      if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9953          max3aRegions[2] = 0; /* AF not supported */
9954      staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9955              max3aRegions, 3);
9956  
9957      /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9958      memset(prop, 0, sizeof(prop));
9959      property_get("persist.camera.facedetect", prop, "1");
9960      uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9961      LOGD("Support face detection mode: %d",
9962               supportedFaceDetectMode);
9963  
9964      int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9965      /* support mode should be OFF if max number of face is 0 */
9966      if (maxFaces <= 0) {
9967          supportedFaceDetectMode = 0;
9968      }
9969      Vector<uint8_t> availableFaceDetectModes;
9970      availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9971      if (supportedFaceDetectMode == 1) {
9972          availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9973      } else if (supportedFaceDetectMode == 2) {
9974          availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9975      } else if (supportedFaceDetectMode == 3) {
9976          availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9977          availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9978      } else {
9979          maxFaces = 0;
9980      }
9981      staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9982              availableFaceDetectModes.array(),
9983              availableFaceDetectModes.size());
9984      staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9985              (int32_t *)&maxFaces, 1);
9986      uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9987      staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9988              &face_bsgc, 1);
9989  
9990      int32_t exposureCompensationRange[] = {
9991              gCamCapability[cameraId]->exposure_compensation_min,
9992              gCamCapability[cameraId]->exposure_compensation_max};
9993      staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9994              exposureCompensationRange,
9995              sizeof(exposureCompensationRange)/sizeof(int32_t));
9996  
9997      uint8_t lensFacing = (facingBack) ?
9998              ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9999      staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
10000  
10001      staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10002                        available_thumbnail_sizes,
10003                        sizeof(available_thumbnail_sizes)/sizeof(int32_t));
10004  
10005      /*all sizes will be clubbed into this tag*/
10006      count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
10007      /*android.scaler.availableStreamConfigurations*/
10008      Vector<int32_t> available_stream_configs;
10009      cam_dimension_t active_array_dim;
10010      active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
10011      active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
10012  
10013      /*advertise list of input dimensions supported based on below property.
10014      By default all sizes up to 5MP will be advertised.
10015      Note that the setprop resolution format should be WxH.
10016      e.g: adb shell setprop persist.camera.input.minsize 1280x720
10017      To list all supported sizes, setprop needs to be set with "0x0" */
10018      cam_dimension_t minInputSize = {2592,1944}; //5MP
10019      memset(prop, 0, sizeof(prop));
10020      property_get("persist.camera.input.minsize", prop, "2592x1944");
10021      if (strlen(prop) > 0) {
10022          char *saveptr = NULL;
10023          char *token = strtok_r(prop, "x", &saveptr);
10024          if (token != NULL) {
10025              minInputSize.width = atoi(token);
10026          }
10027          token = strtok_r(NULL, "x", &saveptr);
10028          if (token != NULL) {
10029              minInputSize.height = atoi(token);
10030          }
10031      }
10032  
10033      /* Add input/output stream configurations for each scalar formats*/
10034      for (size_t j = 0; j < scalar_formats_count; j++) {
10035          switch (scalar_formats[j]) {
10036          case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
10037          case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
10038          case HAL_PIXEL_FORMAT_RAW10:
10039              for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10040                      gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10041                  addStreamConfig(available_stream_configs, scalar_formats[j],
10042                          gCamCapability[cameraId]->raw_dim[i],
10043                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
10044              }
10045              break;
10046          case HAL_PIXEL_FORMAT_BLOB:
10047              for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10048                      gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10049                  addStreamConfig(available_stream_configs, scalar_formats[j],
10050                          gCamCapability[cameraId]->picture_sizes_tbl[i],
10051                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
10052              }
10053              break;
10054          case HAL_PIXEL_FORMAT_YCbCr_420_888:
10055          case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
10056          default:
10057              cam_dimension_t largest_picture_size;
10058              memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
10059              for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10060                      gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10061                  addStreamConfig(available_stream_configs, scalar_formats[j],
10062                          gCamCapability[cameraId]->picture_sizes_tbl[i],
10063                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
10064                  /* For the below 2 formats we also support input streams for reprocessing; advertise those */
10065                  if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
10066                          scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
10067                       if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
10068                              >= minInputSize.width) || (gCamCapability[cameraId]->
10069                              picture_sizes_tbl[i].height >= minInputSize.height)) {
10070                           addStreamConfig(available_stream_configs, scalar_formats[j],
10071                                   gCamCapability[cameraId]->picture_sizes_tbl[i],
10072                                   ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
10073                       }
10074                  }
10075              }
10076  
10077              break;
10078          }
10079      }
10080  
10081      staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10082                        available_stream_configs.array(), available_stream_configs.size());
10083  
10084      /* android.scaler.availableMinFrameDurations */
10085      Vector<int64_t> available_min_durations;
10086      for (size_t j = 0; j < scalar_formats_count; j++) {
10087          switch (scalar_formats[j]) {
10088          case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
10089          case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
10090          case HAL_PIXEL_FORMAT_RAW10:
10091              for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10092                      gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10093                  available_min_durations.add(scalar_formats[j]);
10094                  available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10095                  available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10096                  available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
10097              }
10098              break;
10099          default:
10100              for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10101                      gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10102                  available_min_durations.add(scalar_formats[j]);
10103                  available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10104                  available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10105                  available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
10106              }
10107              break;
10108          }
10109      }
10110      staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
10111                        available_min_durations.array(), available_min_durations.size());
10112  
10113      Vector<int32_t> available_hfr_configs;
10114      for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
10115          int32_t fps = 0;
10116          switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
10117          case CAM_HFR_MODE_60FPS:
10118              fps = 60;
10119              break;
10120          case CAM_HFR_MODE_90FPS:
10121              fps = 90;
10122              break;
10123          case CAM_HFR_MODE_120FPS:
10124              fps = 120;
10125              break;
10126          case CAM_HFR_MODE_150FPS:
10127              fps = 150;
10128              break;
10129          case CAM_HFR_MODE_180FPS:
10130              fps = 180;
10131              break;
10132          case CAM_HFR_MODE_210FPS:
10133              fps = 210;
10134              break;
10135          case CAM_HFR_MODE_240FPS:
10136              fps = 240;
10137              break;
10138          case CAM_HFR_MODE_480FPS:
10139              fps = 480;
10140              break;
10141          case CAM_HFR_MODE_OFF:
10142          case CAM_HFR_MODE_MAX:
10143          default:
10144              break;
10145          }
10146  
10147          /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
10148          if (fps >= MIN_FPS_FOR_BATCH_MODE) {
10149              /* For each HFR frame rate, need to advertise one variable fps range
10150               * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
10151               * and [120, 120]. While camcorder preview alone is running [30, 120] is
10152               * set by the app. When video recording is started, [120, 120] is
10153               * set. This way sensor configuration does not change when recording
10154               * is started */
10155  
10156              /* (width, height, fps_min, fps_max, batch_size_max) */
10157              for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
10158                  j < MAX_SIZES_CNT; j++) {
10159                  available_hfr_configs.add(
10160                          gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
10161                  available_hfr_configs.add(
10162                          gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
10163                  available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
10164                  available_hfr_configs.add(fps);
10165                  available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
10166  
10167                  /* (width, height, fps_min, fps_max, batch_size_max) */
10168                  available_hfr_configs.add(
10169                          gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
10170                  available_hfr_configs.add(
10171                          gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
10172                  available_hfr_configs.add(fps);
10173                  available_hfr_configs.add(fps);
10174                  available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
10175              }
10176         }
10177      }
10178      //Advertise HFR capability only if the property is set
10179      memset(prop, 0, sizeof(prop));
10180      property_get("persist.camera.hal3hfr.enable", prop, "1");
10181      uint8_t hfrEnable = (uint8_t)atoi(prop);
10182  
10183      if(hfrEnable && available_hfr_configs.array()) {
10184          staticInfo.update(
10185                  ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
10186                  available_hfr_configs.array(), available_hfr_configs.size());
10187      }
10188  
10189      int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
10190      staticInfo.update(ANDROID_JPEG_MAX_SIZE,
10191                        &max_jpeg_size, 1);
10192  
10193      uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
10194      size_t size = 0;
10195      count = CAM_EFFECT_MODE_MAX;
10196      count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
10197      for (size_t i = 0; i < count; i++) {
10198          int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10199                  gCamCapability[cameraId]->supported_effects[i]);
10200          if (NAME_NOT_FOUND != val) {
10201              avail_effects[size] = (uint8_t)val;
10202              size++;
10203          }
10204      }
10205      staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
10206                        avail_effects,
10207                        size);
10208  
10209      uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
10210      uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
10211      size_t supported_scene_modes_cnt = 0;
10212      count = CAM_SCENE_MODE_MAX;
10213      count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
10214      for (size_t i = 0; i < count; i++) {
10215          if (gCamCapability[cameraId]->supported_scene_modes[i] !=
10216                  CAM_SCENE_MODE_OFF) {
10217              int val = lookupFwkName(SCENE_MODES_MAP,
10218                      METADATA_MAP_SIZE(SCENE_MODES_MAP),
10219                      gCamCapability[cameraId]->supported_scene_modes[i]);
10220  
10221              if (NAME_NOT_FOUND != val) {
10222                  avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
10223                  supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
10224                  supported_scene_modes_cnt++;
10225              }
10226          }
10227      }
10228      staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10229                        avail_scene_modes,
10230                        supported_scene_modes_cnt);
10231  
10232      uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
10233      makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
10234                        supported_scene_modes_cnt,
10235                        CAM_SCENE_MODE_MAX,
10236                        scene_mode_overrides,
10237                        supported_indexes,
10238                        cameraId);
10239  
10240      if (supported_scene_modes_cnt == 0) {
10241          supported_scene_modes_cnt = 1;
10242          avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
10243      }
10244  
10245      staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
10246              scene_mode_overrides, supported_scene_modes_cnt * 3);
10247  
10248      uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
10249                                           ANDROID_CONTROL_MODE_AUTO,
10250                                           ANDROID_CONTROL_MODE_USE_SCENE_MODE};
10251      staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
10252              available_control_modes,
10253              3);
10254  
10255      uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
10256      size = 0;
10257      count = CAM_ANTIBANDING_MODE_MAX;
10258      count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
10259      for (size_t i = 0; i < count; i++) {
10260          int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
10261                  gCamCapability[cameraId]->supported_antibandings[i]);
10262          if (NAME_NOT_FOUND != val) {
10263              avail_antibanding_modes[size] = (uint8_t)val;
10264              size++;
10265          }
10266  
10267      }
10268      staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10269                        avail_antibanding_modes,
10270                        size);
10271  
10272      uint8_t avail_abberation_modes[] = {
10273              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
10274              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
10275              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
10276      count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
10277      count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
10278      if (0 == count) {
10279          // If no aberration correction modes are available for a device, only advertise the OFF mode
10280          size = 1;
10281      } else {
10282          // If count is non-zero then at least one of the FAST or HIGH_QUALITY modes is supported.
10283          // So, advertise all 3 modes if at least one mode is supported, as per the
10284          // new M requirement.
10285          size = 3;
10286      }
10287      staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10288              avail_abberation_modes,
10289              size);
10290  
10291      uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10292      size = 0;
10293      count = CAM_FOCUS_MODE_MAX;
10294      count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10295      for (size_t i = 0; i < count; i++) {
10296          int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10297                  gCamCapability[cameraId]->supported_focus_modes[i]);
10298          if (NAME_NOT_FOUND != val) {
10299              avail_af_modes[size] = (uint8_t)val;
10300              size++;
10301          }
10302      }
10303      staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10304                        avail_af_modes,
10305                        size);
10306  
10307      uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10308      size = 0;
10309      count = CAM_WB_MODE_MAX;
10310      count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10311      for (size_t i = 0; i < count; i++) {
10312          int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10313                  METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10314                  gCamCapability[cameraId]->supported_white_balances[i]);
10315          if (NAME_NOT_FOUND != val) {
10316              avail_awb_modes[size] = (uint8_t)val;
10317              size++;
10318          }
10319      }
10320      staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10321                        avail_awb_modes,
10322                        size);
10323  
10324      uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10325      count = CAM_FLASH_FIRING_LEVEL_MAX;
10326      count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10327              count);
10328      for (size_t i = 0; i < count; i++) {
10329          available_flash_levels[i] =
10330                  gCamCapability[cameraId]->supported_firing_levels[i];
10331      }
10332      staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10333              available_flash_levels, count);
10334  
10335      uint8_t flashAvailable;
10336      if (gCamCapability[cameraId]->flash_available)
10337          flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10338      else
10339          flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10340      staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10341              &flashAvailable, 1);
10342  
10343      Vector<uint8_t> avail_ae_modes;
10344      count = CAM_AE_MODE_MAX;
10345      count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10346      for (size_t i = 0; i < count; i++) {
10347          uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10348          if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10349              aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
10350          }
10351          avail_ae_modes.add(aeMode);
10352      }
10353      if (flashAvailable) {
10354          avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10355          avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10356      }
10357      staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10358                        avail_ae_modes.array(),
10359                        avail_ae_modes.size());
10360  
10361      int32_t sensitivity_range[2];
10362      sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10363      sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10364      staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10365                        sensitivity_range,
10366                        sizeof(sensitivity_range) / sizeof(int32_t));
10367  
10368      staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10369                        &gCamCapability[cameraId]->max_analog_sensitivity,
10370                        1);
10371  
10372      int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10373      staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10374                        &sensor_orientation,
10375                        1);
10376  
10377      int32_t max_output_streams[] = {
10378              MAX_STALLING_STREAMS,
10379              MAX_PROCESSED_STREAMS,
10380              MAX_RAW_STREAMS};
10381      staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10382              max_output_streams,
10383              sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10384  
10385      uint8_t avail_leds = 0;
10386      staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10387                        &avail_leds, 0);
10388  
10389      uint8_t focus_dist_calibrated;
10390      int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10391              gCamCapability[cameraId]->focus_dist_calibrated);
10392      if (NAME_NOT_FOUND != val) {
10393          focus_dist_calibrated = (uint8_t)val;
10394          staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10395                       &focus_dist_calibrated, 1);
10396      }
10397  
10398      int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10399      size = 0;
10400      count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10401              MAX_TEST_PATTERN_CNT);
10402      for (size_t i = 0; i < count; i++) {
10403          int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10404                  gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10405          if (NAME_NOT_FOUND != testpatternMode) {
10406              avail_testpattern_modes[size] = testpatternMode;
10407              size++;
10408          }
10409      }
10410      staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10411                        avail_testpattern_modes,
10412                        size);
10413  
10414      uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10415      staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10416                        &max_pipeline_depth,
10417                        1);
10418  
10419      int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10420      staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10421                        &partial_result_count,
10422                         1);
10423  
10424      int32_t max_stall_duration = MAX_REPROCESS_STALL;
10425      staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10426  
    // Build ANDROID_REQUEST_AVAILABLE_CAPABILITIES. The first four entries
    // are advertised unconditionally; the rest depend on sensor type and
    // runtime configuration flags computed earlier in this function.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Constrained high-speed video only when HFR is enabled AND at least one
    // HFR configuration was collected earlier (non-null backing array).
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW capability is advertised for any non-YUV (i.e. bayer) sensor.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    // Only back camera supports MOTION_TRACKING
    // NOTE(review): hard-codes camera ID 0 as the back camera — TODO confirm
    // this holds for all targets using this HAL.
    if (cameraId == 0) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);
10469  
    // This HAL supports exactly one reprocess input stream.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    // Two input formats are declared, each reprocessable to BLOB (JPEG) and
    // YCbCr_420_888 outputs:
    //   IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888}   (private reprocessing)
    //   YCbCr_420_888          -> {BLOB, YCbCr_420_888}   (YUV reprocessing)
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Settings applied within one frame: per-frame control is supported.
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost range (ISP digital gain), HAL 3.4+ only.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
#endif
10498  
    // Fixed lists of processing modes this HAL supports, published one tag at
    // a time. Note OFF is intentionally absent from the hot-pixel list.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    // NOTE(review): count is hard-coded as 3 instead of the sizeof/sizeof
    // idiom used elsewhere in this function — correct today, but fragile if
    // the array ever changes.
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    // NOTE(review): count hard-coded as 2 — same fragility as above.
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot pixel map output is not supported; only OFF is advertised.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10546  
    // DNG color calibration data: map the two vendor reference illuminants to
    // framework enum values (skipped individually if no mapping exists), then
    // publish the forward / color / calibration transform matrices verbatim
    // from the capability table. The (void *) hop silences alignment-cast
    // warnings when reinterpreting the capability arrays as rationals.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10584  
#ifndef USE_HAL_3_3

    // Session keys (HAL 3.4+): settings that may require expensive stream
    // reconfiguration when changed between requests.
    int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE, QCAMERA3_INSTANT_AEC_MODE, QCAMERA3_USE_AV_TIMER,
        QCAMERA3_VIDEO_HDR_MODE, TANGO_MODE_DATA_SENSOR_FULLFOV};
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
            sizeof(session_keys) / sizeof(session_keys[0]));

#endif

    // Baseline set of controllable request keys: standard Android tags,
    // Qualcomm (QCAMERA3_*) vendor tags, and Nexus/DevCamDebug experimental
    // tags. Conditional keys are appended below before publishing.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP_MODE, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
       QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
       QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
       QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
       QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
       QCAMERA3_BINNING_CORRECTION_MODE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       TANGO_MODE_DATA_SENSOR_FULLFOV,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
       NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only controllable when more than one focus mode exists
    // (i.e. the lens can actually be driven).
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    // ZSL-related request keys are only exposed when the global flag enables
    // the ENABLE_ZSL vendor pathway.
    if (gExposeEnableZslKey) {
        available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
10673  
    // Baseline set of result (capture metadata) keys. As with request keys,
    // this mixes standard Android tags, QCAMERA3_* vendor tags and
    // Nexus/DevCamDebug experimental tags; conditional keys are appended
    // below before publishing.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AF_SCENE_CHANGE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_OIS_TIMESTAMPS, ANDROID_STATISTICS_OIS_X_SHIFTS,
       ANDROID_STATISTICS_OIS_Y_SHIFTS,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
       QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
       QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
       QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
       QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
       QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
       QCAMERA3_STATS_GAZE_DEGREE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys zzHDR
       DEVCAMDEBUG_AEC_L_REAL_GAIN,
       DEVCAMDEBUG_AEC_L_LINECOUNT,
       DEVCAMDEBUG_AEC_S_REAL_GAIN,
       DEVCAMDEBUG_AEC_S_LINECOUNT,
       DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
       DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
       // DevCamDebug metadata result_keys ADRC
       DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
       DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
       DEVCAMDEBUG_AEC_GTM_RATIO,
       DEVCAMDEBUG_AEC_LTM_RATIO,
       DEVCAMDEBUG_AEC_LA_RATIO,
       DEVCAMDEBUG_AEC_GAMMA_RATIO,
       // DevCamDebug metadata result_keys AEC MOTION
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
       DEVCAMDEBUG_AEC_SUBJECT_MOTION,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
       NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
       NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
       NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION
       };

    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    // AF regions are reported only when the lens is actually movable.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // RAW (bayer) sensors additionally report noise profile and green split.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // supportedFaceDetectMode 1 => SIMPLE (rects + scores);
    // 2 or 3 => FULL (adds face IDs and landmarks).
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
#ifndef USE_HAL_3_3
    // Dynamic black/white level results, HAL 3.4+ only.
    {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
#endif

    // ZSL-related result keys mirror the conditional request keys above.
    if (gExposeEnableZslKey) {
        available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
10842  
    // Baseline set of static characteristics keys. Conditional additions
    // (optical black regions, lens calibration, depth) are appended below to
    // available_characteristics_keys, which is declared earlier in this
    // function and published elsewhere.
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
#ifndef USE_HAL_3_3
       ANDROID_SENSOR_OPAQUE_RAW_SIZE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
#endif
       QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
       QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
       QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
       QCAMERA3_SHARPNESS_RANGE,
       QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
       QCAMERA3_STATS_BSGC_AVAILABLE
       };

    available_characteristics_keys.appendArray(characteristics_keys_basic,
            sizeof(characteristics_keys_basic)/sizeof(int32_t));
#ifndef USE_HAL_3_3
    // Only advertise optical black regions when the sensor actually has them.
    if (hasBlackRegions) {
        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
    }
#endif

    // Lens pose/intrinsic/distortion calibration is published for camera 0
    // only (the back camera — consistent with MOTION_TRACKING above).
    if (cameraId == 0) {
        int32_t lensCalibrationKeys[] = {
            ANDROID_LENS_POSE_ROTATION,
            ANDROID_LENS_POSE_TRANSLATION,
            ANDROID_LENS_POSE_REFERENCE,
            ANDROID_LENS_INTRINSIC_CALIBRATION,
            ANDROID_LENS_DISTORTION,
        };
        available_characteristics_keys.appendArray(lensCalibrationKeys,
                sizeof(lensCalibrationKeys) / sizeof(lensCalibrationKeys[0]));
    }

    // Depth keys are only published when a PDAF depth stream index was found
    // earlier (indexPD >= 0).
    if (0 <= indexPD) {
        int32_t depthKeys[] = {
                ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
                ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
        };
        available_characteristics_keys.appendArray(depthKeys,
                sizeof(depthKeys) / sizeof(depthKeys[0]));
    }
10940  
    /*available stall durations depend on the hw + sw and will be different for different devices */
    /*have to add for raw after implementation*/
    // Each entry in the published array is a 4-tuple:
    // (format, width, height, stall duration ns).
    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);

    Vector<int64_t> available_stall_durations;
    for (uint32_t j = 0; j < stall_formats_count; j++) {
        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
            // BLOB (JPEG): one tuple per supported picture size, using the
            // measured JPEG stall duration table.
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
          }
        } else {
            // RAW16: one tuple per supported RAW dimension.
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
            }
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
                      available_stall_durations.array(),
                      available_stall_durations.size());

    //QCAMERA3_OPAQUE_RAW
    // Select the opaque RAW pixel format from the vendor packing scheme
    // (legacy QCOM vs MIPI) and the sensor white level (8/10/12-bit).
    // Defaults: 10bpp QCOM packing / LEGACY format when nothing matches.
    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
    case LEGACY_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
        break;
    case MIPI_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
        break;
    default:
        // Unknown packing: log and fall through with the defaults above.
        LOGE("unknown opaque_raw_format %d",
                gCamCapability[cameraId]->opaque_raw_fmt);
        break;
    }
    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);

    // For each supported RAW dimension, record (width, height, stride) where
    // the stride comes from the buffer-plane layout computed for 'fmt'.
    Vector<int32_t> strides;
    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
        cam_stream_buf_plane_info_t buf_planes;
        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
            &gCamCapability[cameraId]->padding_info, &buf_planes);
        strides.add(buf_planes.plane_info.mp[0].stride);
    }
11009  
11010      if (!strides.isEmpty()) {
11011          staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
11012                  strides.size());
11013          available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
11014      }
11015  
11016      //TBD: remove the following line once backend advertises zzHDR in feature mask
11017      gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
11018      //Video HDR default
11019      if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
11020              (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
11021              CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
11022          int32_t vhdr_mode[] = {
11023                  QCAMERA3_VIDEO_HDR_MODE_OFF,
11024                  QCAMERA3_VIDEO_HDR_MODE_ON};
11025  
11026          size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
11027          staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
11028                      vhdr_mode, vhdr_mode_count);
11029          available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
11030      }
11031  
11032      staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11033              (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
11034              sizeof(gCamCapability[cameraId]->related_cam_calibration));
11035  
11036      uint8_t isMonoOnly =
11037              (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
11038      staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
11039              &isMonoOnly, 1);
11040  
11041  #ifndef USE_HAL_3_3
11042      Vector<int32_t> opaque_size;
11043      for (size_t j = 0; j < scalar_formats_count; j++) {
11044          if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
11045              for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11046                      gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11047                  cam_stream_buf_plane_info_t buf_planes;
11048  
11049                  rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
11050                           &gCamCapability[cameraId]->padding_info, &buf_planes);
11051  
11052                  if (rc == 0) {
11053                      opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
11054                      opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
11055                      opaque_size.add(buf_planes.plane_info.frame_len);
11056                  }else {
11057                      LOGE("raw frame calculation failed!");
11058                  }
11059              }
11060          }
11061      }
11062  
11063      if ((opaque_size.size() > 0) &&
11064              (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
11065          staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
11066      else
11067          LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
11068  #endif
11069  
11070      if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
11071          int32_t avail_ir_modes[CAM_IR_MODE_MAX];
11072          size = 0;
11073          count = CAM_IR_MODE_MAX;
11074          count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
11075          for (size_t i = 0; i < count; i++) {
11076              int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
11077                      gCamCapability[cameraId]->supported_ir_modes[i]);
11078              if (NAME_NOT_FOUND != val) {
11079                  avail_ir_modes[size] = (int32_t)val;
11080                  size++;
11081              }
11082          }
11083          staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
11084                  avail_ir_modes, size);
11085          available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
11086      }
11087  
11088      if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
11089          uint8_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
11090          size = 0;
11091          count = CAM_AEC_CONVERGENCE_MAX;
11092          count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
11093          for (size_t i = 0; i < count; i++) {
11094              int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
11095                      gCamCapability[cameraId]->supported_instant_aec_modes[i]);
11096              if (NAME_NOT_FOUND != val) {
11097                  available_instant_aec_modes[size] = (uint8_t)val;
11098                  size++;
11099              }
11100          }
11101          staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
11102                  available_instant_aec_modes, size);
11103          available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
11104      }
11105  
11106      int32_t sharpness_range[] = {
11107              gCamCapability[cameraId]->sharpness_ctrl.min_value,
11108              gCamCapability[cameraId]->sharpness_ctrl.max_value};
11109      staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
11110  
11111      if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
11112          int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
11113          size = 0;
11114          count = CAM_BINNING_CORRECTION_MODE_MAX;
11115          count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
11116          for (size_t i = 0; i < count; i++) {
11117              int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
11118                      METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
11119                      gCamCapability[cameraId]->supported_binning_modes[i]);
11120              if (NAME_NOT_FOUND != val) {
11121                  avail_binning_modes[size] = (int32_t)val;
11122                  size++;
11123              }
11124          }
11125          staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
11126                  avail_binning_modes, size);
11127          available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
11128      }
11129  
11130      if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
11131          int32_t available_aec_modes[CAM_AEC_MODE_MAX];
11132          size = 0;
11133          count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
11134          for (size_t i = 0; i < count; i++) {
11135              int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
11136                      gCamCapability[cameraId]->supported_aec_modes[i]);
11137              if (NAME_NOT_FOUND != val)
11138                  available_aec_modes[size++] = val;
11139          }
11140          staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
11141                  available_aec_modes, size);
11142          available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
11143      }
11144  
11145      if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
11146          int32_t available_iso_modes[CAM_ISO_MODE_MAX];
11147          size = 0;
11148          count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
11149          for (size_t i = 0; i < count; i++) {
11150              int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
11151                      gCamCapability[cameraId]->supported_iso_modes[i]);
11152              if (NAME_NOT_FOUND != val)
11153                  available_iso_modes[size++] = val;
11154          }
11155          staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
11156                  available_iso_modes, size);
11157          available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
11158      }
11159  
11160      int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
11161      for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
11162          available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
11163      staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
11164              available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
11165  
11166      int32_t available_saturation_range[4];
11167      available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
11168      available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
11169      available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
11170      available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
11171      staticInfo.update(QCAMERA3_SATURATION_RANGE,
11172              available_saturation_range, 4);
11173  
11174      uint8_t is_hdr_values[2];
11175      is_hdr_values[0] = 0;
11176      is_hdr_values[1] = 1;
11177      staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
11178              is_hdr_values, 2);
11179  
11180      float is_hdr_confidence_range[2];
11181      is_hdr_confidence_range[0] = 0.0;
11182      is_hdr_confidence_range[1] = 1.0;
11183      staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
11184              is_hdr_confidence_range, 2);
11185  
11186      size_t eepromLength = strnlen(
11187              reinterpret_cast<const char *>(
11188                      gCamCapability[cameraId]->eeprom_version_info),
11189              sizeof(gCamCapability[cameraId]->eeprom_version_info));
11190      if (0 < eepromLength) {
11191          char easelInfo[] = ",E:N";
11192          char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
11193          if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
11194              eepromLength += sizeof(easelInfo);
11195              strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
11196                      gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-Y" : ",E:N"),
11197                      MAX_EEPROM_VERSION_INFO_LEN);
11198          }
11199          staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
11200                  gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11201          available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
11202  
11203          staticInfo.update(ANDROID_INFO_VERSION,
11204                  gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11205          available_characteristics_keys.add(ANDROID_INFO_VERSION);
11206      }
11207  
11208      staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
11209                        available_characteristics_keys.array(),
11210                        available_characteristics_keys.size());
11211  
11212      std::vector<uint8_t> availableOisModes;
11213      availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_OFF);
11214      if (cameraId == 0) {
11215          availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_ON);
11216      }
11217  
11218      staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
11219                        availableOisModes.data(),
11220                        availableOisModes.size());
11221  
11222      gStaticMetadata[cameraId] = staticInfo.release();
11223      return rc;
11224  }
11225  
11226  /*===========================================================================
11227   * FUNCTION   : makeTable
11228   *
11229   * DESCRIPTION: make a table of sizes
11230   *
11231   * PARAMETERS :
11232   *
11233   *
11234   *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)11235  void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
11236          size_t max_size, int32_t *sizeTable)
11237  {
11238      size_t j = 0;
11239      if (size > max_size) {
11240         size = max_size;
11241      }
11242      for (size_t i = 0; i < size; i++) {
11243          sizeTable[j] = dimTable[i].width;
11244          sizeTable[j+1] = dimTable[i].height;
11245          j+=2;
11246      }
11247  }
11248  
11249  /*===========================================================================
11250   * FUNCTION   : makeFPSTable
11251   *
11252   * DESCRIPTION: make a table of fps ranges
11253   *
11254   * PARAMETERS :
11255   *
11256   *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)11257  void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
11258          size_t max_size, int32_t *fpsRangesTable)
11259  {
11260      size_t j = 0;
11261      if (size > max_size) {
11262         size = max_size;
11263      }
11264      for (size_t i = 0; i < size; i++) {
11265          fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
11266          fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
11267          j+=2;
11268      }
11269  }
11270  
11271  /*===========================================================================
11272   * FUNCTION   : makeOverridesList
11273   *
11274   * DESCRIPTION: make a list of scene mode overrides
11275   *
11276   * PARAMETERS :
11277   *
11278   *
11279   *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    size_t j = 0; // write cursor: 3 entries (AE, AWB, AF) are emitted per scene mode
    if (size > max_size) {
       size = max_size;
    }
    // Number of focus modes this sensor supports, capped to the table size.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework-supported scene mode back
        // to its slot in the daemon's full overrides table.
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the unit has a flash.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL enum into the framework enum.
        // NOTE(review): on NAME_NOT_FOUND, overridesList[j+1] is left with
        // whatever the caller put there — presumably zero-initialized; confirm.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only advertise the daemon's focus mode if the sensor
        // actually supports it; otherwise fall back to AF off.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
11324  
11325  /*===========================================================================
11326   * FUNCTION   : filterJpegSizes
11327   *
11328   * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
11329   *              could be downscaled to
11330   *
11331   * PARAMETERS :
11332   *
11333   * RETURN     : length of jpegSizes array
11334   *==========================================================================*/
11335  
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)11336  size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11337          size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11338          uint8_t downscale_factor)
11339  {
11340      if (0 == downscale_factor) {
11341          downscale_factor = 1;
11342      }
11343  
11344      int32_t min_width = active_array_size.width / downscale_factor;
11345      int32_t min_height = active_array_size.height / downscale_factor;
11346      size_t jpegSizesCnt = 0;
11347      if (processedSizesCnt > maxCount) {
11348          processedSizesCnt = maxCount;
11349      }
11350      for (size_t i = 0; i < processedSizesCnt; i+=2) {
11351          if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11352              jpegSizes[jpegSizesCnt] = processedSizes[i];
11353              jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11354              jpegSizesCnt += 2;
11355          }
11356      }
11357      return jpegSizesCnt;
11358  }
11359  
11360  /*===========================================================================
11361   * FUNCTION   : computeNoiseModelEntryS
11362   *
11363   * DESCRIPTION: function to map a given sensitivity to the S noise
11364   *              model parameters in the DNG noise model.
11365   *
11366   * PARAMETERS : sens : the sensor sensitivity
11367   *
11368   ** RETURN    : S (sensor amplification) noise
11369   *
11370   *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)11371  double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11372      double s = gCamCapability[mCameraId]->gradient_S * sens +
11373              gCamCapability[mCameraId]->offset_S;
11374      return ((s < 0.0) ? 0.0 : s);
11375  }
11376  
11377  /*===========================================================================
11378   * FUNCTION   : computeNoiseModelEntryO
11379   *
11380   * DESCRIPTION: function to map a given sensitivity to the O noise
11381   *              model parameters in the DNG noise model.
11382   *
11383   * PARAMETERS : sens : the sensor sensitivity
11384   *
11385   ** RETURN    : O (sensor readout) noise
11386   *
11387   *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)11388  double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11389      int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11390      double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11391              1.0 : (1.0 * sens / max_analog_sens);
11392      double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11393              gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11394      return ((o < 0.0) ? 0.0 : o);
11395  }
11396  
11397  /*===========================================================================
11398   * FUNCTION   : getSensorSensitivity
11399   *
11400   * DESCRIPTION: convert iso_mode to an integer value
11401   *
11402   * PARAMETERS : iso_mode : the iso_mode supported by sensor
11403   *
11404   ** RETURN    : sensitivity supported by sensor
11405   *
11406   *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)11407  int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11408  {
11409      int32_t sensitivity;
11410  
11411      switch (iso_mode) {
11412      case CAM_ISO_MODE_100:
11413          sensitivity = 100;
11414          break;
11415      case CAM_ISO_MODE_200:
11416          sensitivity = 200;
11417          break;
11418      case CAM_ISO_MODE_400:
11419          sensitivity = 400;
11420          break;
11421      case CAM_ISO_MODE_800:
11422          sensitivity = 800;
11423          break;
11424      case CAM_ISO_MODE_1600:
11425          sensitivity = 1600;
11426          break;
11427      default:
11428          sensitivity = -1;
11429          break;
11430      }
11431      return sensitivity;
11432  }
11433  
/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: lazily create the Easel manager client and, if Easel hardware
 *              is present, power it on and immediately suspend it. Also
 *              latches the HDR+ related globals (gEaselBypassOnly,
 *              gEaselProfilingEnabled, gExposeEnableZslKey) from properties.
 *              Caller must hold gHdrPlusClientLock.
 *
 * RETURN     : OK on success (or when Easel is absent / deliberately left
 *              powered off); -ENODEV or the open() error code on failure.
 *==========================================================================*/
int QCamera3HardwareInterface::initHdrPlusClientLocked() {
    // Create the manager client once; reused across all cameras.
    if (gEaselManagerClient == nullptr) {
        gEaselManagerClient = EaselManagerClient::create();
        if (gEaselManagerClient == nullptr) {
            ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
            return -ENODEV;
        }
    }

    if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
        // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
        //  to connect to Easel.
        bool doNotpowerOnEasel =
                property_get_bool("camera.hdrplus.donotpoweroneasel", false);

        if (doNotpowerOnEasel) {
            ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
            return OK;
        }

        // If Easel is present, power on Easel and suspend it immediately.
        status_t res = gEaselManagerClient->open();
        if (res != OK) {
            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
                    res);
            return res;
        }

        // Mark opened before suspend: a failed suspend is logged but does not
        // unwind the open, so we must not retry open() on the next call.
        EaselManagerClientOpened = true;

        res = gEaselManagerClient->suspend();
        if (res != OK) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        gEaselBypassOnly = property_get_bool("persist.camera.hdrplus.disable", false);
        gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);

        // Expose enableZsl key only when HDR+ mode is enabled.
        gExposeEnableZslKey = !gEaselBypassOnly;
    }

    return OK;
}
11478  
11479  /*===========================================================================
11480   * FUNCTION   : getCamInfo
11481   *
11482   * DESCRIPTION: query camera capabilities
11483   *
11484   * PARAMETERS :
11485   *   @cameraId  : camera Id
11486   *   @info      : camera info struct to be filled in with camera capabilities
11487   *
11488   * RETURN     : int type of status
11489   *              NO_ERROR  -- success
11490   *              none-zero failure code
11491   *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the per-camera capability / static-metadata caches.
    // Every early return below must release it first.
    pthread_mutex_lock(&gCamLock);

    // Bring up the HDR+ (Easel) client before querying capabilities;
    // a failure here aborts the whole query.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        rc = initHdrPlusClientLocked();
        if (rc != OK) {
            ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily query and cache the backend capabilities for this camera.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily build and cache the static metadata derived from those capabilities.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the HAL sensor position (including aux sensors) onto the two
    // framework-visible facings.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
11577  
11578  /*===========================================================================
11579   * FUNCTION   : translateCapabilityToMetadata
11580   *
11581   * DESCRIPTION: translate the capability into camera_metadata_t
11582   *
11583   * PARAMETERS : type of the request
11584   *
11585   *
11586   * RETURN     : success: camera_metadata_t*
11587   *              failure: NULL
11588   *
11589   *==========================================================================*/
translateCapabilityToMetadata(int type)11590  camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11591  {
11592      if (mDefaultMetadata[type] != NULL) {
11593          return mDefaultMetadata[type];
11594      }
11595      //first time we are handling this request
11596      //fill up the metadata structure using the wrapper class
11597      CameraMetadata settings;
11598      //translate from cam_capability_t to camera_metadata_tag_t
11599      static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11600      settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11601      int32_t defaultRequestID = 0;
11602      settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11603  
11604      /* OIS disable */
11605      char ois_prop[PROPERTY_VALUE_MAX];
11606      memset(ois_prop, 0, sizeof(ois_prop));
11607      property_get("persist.camera.ois.disable", ois_prop, "0");
11608      uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11609  
11610      /* Force video to use OIS */
11611      char videoOisProp[PROPERTY_VALUE_MAX];
11612      memset(videoOisProp, 0, sizeof(videoOisProp));
11613      property_get("persist.camera.ois.video", videoOisProp, "1");
11614      uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
11615  
11616      // Hybrid AE enable/disable
11617      char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11618      memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11619      property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11620      uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11621  
11622      uint8_t controlIntent = 0;
11623      uint8_t focusMode;
11624      uint8_t vsMode;
11625      uint8_t optStabMode;
11626      uint8_t cacMode;
11627      uint8_t edge_mode;
11628      uint8_t noise_red_mode;
11629      uint8_t shading_mode;
11630      uint8_t hot_pixel_mode;
11631      uint8_t tonemap_mode;
11632      bool highQualityModeEntryAvailable = FALSE;
11633      bool fastModeEntryAvailable = FALSE;
11634      uint8_t histogramEnable = false;
11635      vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11636      optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11637      uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
11638      uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
11639      uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
11640  
11641      switch (type) {
11642        case CAMERA3_TEMPLATE_PREVIEW:
11643          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11644          focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11645          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11646          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11647          edge_mode = ANDROID_EDGE_MODE_FAST;
11648          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11649          shading_mode = ANDROID_SHADING_MODE_FAST;
11650          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11651          tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11652          break;
11653        case CAMERA3_TEMPLATE_STILL_CAPTURE:
11654          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11655          focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11656          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11657          edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11658          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11659          shading_mode = ANDROID_SHADING_MODE_HIGH_QUALITY;
11660          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
11661          tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11662          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11663          // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11664          for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11665              if (gCamCapability[mCameraId]->aberration_modes[i] ==
11666                      CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11667                  highQualityModeEntryAvailable = TRUE;
11668              } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11669                      CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11670                  fastModeEntryAvailable = TRUE;
11671              }
11672          }
11673          if (highQualityModeEntryAvailable) {
11674              cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11675          } else if (fastModeEntryAvailable) {
11676              cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11677          }
11678          if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11679              shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11680          }
11681          enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
11682          break;
11683        case CAMERA3_TEMPLATE_VIDEO_RECORD:
11684          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11685          focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11686          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11687          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11688          edge_mode = ANDROID_EDGE_MODE_FAST;
11689          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11690          shading_mode = ANDROID_SHADING_MODE_FAST;
11691          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11692          tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11693          if (forceVideoOis)
11694              optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11695          break;
11696        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11697          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11698          focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11699          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11700          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11701          edge_mode = ANDROID_EDGE_MODE_FAST;
11702          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11703          shading_mode = ANDROID_SHADING_MODE_FAST;
11704          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11705          tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11706          if (forceVideoOis)
11707              optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11708          break;
11709        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11710          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11711          focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11712          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11713          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11714          edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11715          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11716          shading_mode = ANDROID_SHADING_MODE_FAST;
11717          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11718          tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11719          break;
11720        case CAMERA3_TEMPLATE_MANUAL:
11721          edge_mode = ANDROID_EDGE_MODE_FAST;
11722          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11723          shading_mode = ANDROID_SHADING_MODE_FAST;
11724          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11725          tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11726          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11727          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11728          focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11729          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11730          break;
11731        default:
11732          edge_mode = ANDROID_EDGE_MODE_FAST;
11733          noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11734          shading_mode = ANDROID_SHADING_MODE_FAST;
11735          hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11736          tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11737          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11738          controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11739          focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11740          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11741          break;
11742      }
11743      // Set CAC to OFF if underlying device doesn't support
11744      if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11745          cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11746      }
11747      settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11748      settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11749      settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11750      if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11751          focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11752      }
11753      settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
11754      settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
11755      settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
11756  
11757      if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11758              gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11759          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11760      else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11761              gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11762              || ois_disable)
11763          optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11764      settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
11765      settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
11766  
11767      settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11768              &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11769  
11770      static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11771      settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11772  
11773      static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11774      settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11775  
11776      static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11777      settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11778  
11779      static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11780      settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11781  
11782      static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11783      settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11784  
11785      static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11786      settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11787  
11788      static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11789      settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11790  
11791      /*flash*/
11792      static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11793      settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11794  
11795      static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11796      settings.update(ANDROID_FLASH_FIRING_POWER,
11797              &flashFiringLevel, 1);
11798  
11799      /* lens */
11800      float default_aperture = gCamCapability[mCameraId]->apertures[0];
11801      settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11802  
11803      if (gCamCapability[mCameraId]->filter_densities_count) {
11804          float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11805          settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11806                          gCamCapability[mCameraId]->filter_densities_count);
11807      }
11808  
11809      float default_focal_length = gCamCapability[mCameraId]->focal_length;
11810      settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11811  
11812      static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11813      settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11814  
11815      static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11816      settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11817  
11818      /* face detection (default to OFF) */
11819      static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11820      settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11821  
11822      static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11823      settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
11824  
11825      static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11826      settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11827  
11828      static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11829      settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11830  
11831  
11832      static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11833      settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11834  
11835      /* Exposure time(Update the Min Exposure Time)*/
11836      int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11837      settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11838  
11839      /* frame duration */
11840      static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11841      settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11842  
11843      /* sensitivity */
11844      static const int32_t default_sensitivity = 100;
11845      settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
11846  #ifndef USE_HAL_3_3
11847      static const int32_t default_isp_sensitivity =
11848              gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11849      settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11850  #endif
11851  
11852      /*edge mode*/
11853      settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11854  
11855      /*noise reduction mode*/
11856      settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11857  
11858      /*shading mode*/
11859      settings.update(ANDROID_SHADING_MODE, &shading_mode, 1);
11860  
11861      /*hot pixel mode*/
11862      settings.update(ANDROID_HOT_PIXEL_MODE, &hot_pixel_mode, 1);
11863  
11864      /*color correction mode*/
11865      static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11866      settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11867  
11868      /*transform matrix mode*/
11869      settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11870  
11871      int32_t scaler_crop_region[4];
11872      scaler_crop_region[0] = 0;
11873      scaler_crop_region[1] = 0;
11874      scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11875      scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11876      settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11877  
11878      static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11879      settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11880  
11881      /*focus distance*/
11882      float focus_distance = 0.0;
11883      settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11884  
11885      /*target fps range: use maximum range for picture, and maximum fixed range for video*/
11886      /* Restrict template max_fps to 30 */
11887      float max_range = 0.0;
11888      float max_fixed_fps = 0.0;
11889      int32_t fps_range[2] = {0, 0};
11890      for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11891              i++) {
11892          if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11893                  TEMPLATE_MAX_PREVIEW_FPS) {
11894              continue;
11895          }
11896          float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11897              gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11898          if (type == CAMERA3_TEMPLATE_PREVIEW ||
11899                  type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11900                  type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11901              if (range > max_range) {
11902                  fps_range[0] =
11903                      (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11904                  fps_range[1] =
11905                      (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11906                  max_range = range;
11907              }
11908          } else {
11909              if (range < 0.01 && max_fixed_fps <
11910                      gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11911                  fps_range[0] =
11912                      (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11913                  fps_range[1] =
11914                      (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11915                  max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11916              }
11917          }
11918      }
11919      settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11920  
11921      /*precapture trigger*/
11922      uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11923      settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11924  
11925      /*af trigger*/
11926      uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11927      settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11928  
11929      /* ae & af regions */
11930      int32_t active_region[] = {
11931              gCamCapability[mCameraId]->active_array_size.left,
11932              gCamCapability[mCameraId]->active_array_size.top,
11933              gCamCapability[mCameraId]->active_array_size.left +
11934                      gCamCapability[mCameraId]->active_array_size.width,
11935              gCamCapability[mCameraId]->active_array_size.top +
11936                      gCamCapability[mCameraId]->active_array_size.height,
11937              0};
11938      settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11939              sizeof(active_region) / sizeof(active_region[0]));
11940      settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11941              sizeof(active_region) / sizeof(active_region[0]));
11942  
11943      /* black level lock */
11944      uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11945      settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11946  
11947      //special defaults for manual template
11948      if (type == CAMERA3_TEMPLATE_MANUAL) {
11949          static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11950          settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11951  
11952          static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11953          settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11954  
11955          static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11956          settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11957  
11958          static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11959          settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11960  
11961          static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11962          settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11963  
11964          static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11965          settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11966      }
11967  
11968  
11969      /* TNR
11970       * We'll use this location to determine which modes TNR will be set.
11971       * We will enable TNR to be on if either of the Preview/Video stream requires TNR
11972       * This is not to be confused with linking on a per stream basis that decision
11973       * is still on per-session basis and will be handled as part of config stream
11974       */
11975      uint8_t tnr_enable = 0;
11976  
11977      if (m_bTnrPreview || m_bTnrVideo) {
11978  
11979          switch (type) {
11980              case CAMERA3_TEMPLATE_VIDEO_RECORD:
11981                      tnr_enable = 1;
11982                      break;
11983  
11984              default:
11985                      tnr_enable = 0;
11986                      break;
11987          }
11988  
11989          int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11990          settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11991          settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11992  
11993          LOGD("TNR:%d with process plate %d for template:%d",
11994                               tnr_enable, tnr_process_type, type);
11995      }
11996  
11997      //Update Link tags to default
11998      uint8_t sync_type = CAM_TYPE_STANDALONE;
11999      settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
12000  
12001      uint8_t is_main = 1;
12002      settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
12003  
12004      uint8_t related_camera_id = mCameraId;
12005      settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
12006  
12007      /* CDS default */
12008      char prop[PROPERTY_VALUE_MAX];
12009      memset(prop, 0, sizeof(prop));
12010      property_get("persist.camera.CDS", prop, "Auto");
12011      cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
12012      cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
12013      if (CAM_CDS_MODE_MAX == cds_mode) {
12014          cds_mode = CAM_CDS_MODE_AUTO;
12015      }
12016  
12017      /* Disabling CDS in templates which have TNR enabled*/
12018      if (tnr_enable)
12019          cds_mode = CAM_CDS_MODE_OFF;
12020  
12021      int32_t mode = cds_mode;
12022      settings.update(QCAMERA3_CDS_MODE, &mode, 1);
12023  
12024      /* Manual Convergence AEC Speed is disabled by default*/
12025      float default_aec_speed = 0;
12026      settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
12027  
12028      /* Manual Convergence AWB Speed is disabled by default*/
12029      float default_awb_speed = 0;
12030      settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
12031  
12032      // Set instant AEC to normal convergence by default
12033      uint8_t instant_aec_mode = (uint8_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
12034      settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
12035  
12036      uint8_t oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_OFF;
12037      if (mCameraId == 0) {
12038          oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_ON;
12039      }
12040      settings.update(ANDROID_STATISTICS_OIS_DATA_MODE, &oisDataMode, 1);
12041  
12042      if (gExposeEnableZslKey) {
12043          settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
12044          int32_t postview = 0;
12045          settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
12046          int32_t continuousZslCapture = 0;
12047          settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
12048          // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
12049          // CAMERA3_TEMPLATE_PREVIEW.
12050          int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
12051                                    type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
12052          settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
12053  
12054          // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
12055          // hybrid ae is enabled for 3rd party app HDR+.
12056          if (type == CAMERA3_TEMPLATE_PREVIEW ||
12057                  type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
12058              hybrid_ae = 1;
12059          }
12060      }
12061      /* hybrid ae */
12062      settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
12063  
12064      int32_t fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
12065      settings.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
12066  
12067      mDefaultMetadata[type] = settings.release();
12068  
12069      return mDefaultMetadata[type];
12070  }
12071  
12072  /*===========================================================================
12073   * FUNCTION   : getExpectedFrameDuration
12074   *
12075   * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
12076   *              duration
12077   *
12078   * PARAMETERS :
12079   *   @request   : request settings
12080   *   @frameDuration : The maximum frame duration in nanoseconds
12081   *
12082   * RETURN     : None
12083   *==========================================================================*/
getExpectedFrameDuration(const camera_metadata_t * request,nsecs_t * frameDuration)12084  void QCamera3HardwareInterface::getExpectedFrameDuration(
12085          const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
12086      if (nullptr == frameDuration) {
12087          return;
12088      }
12089  
12090      camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12091      find_camera_metadata_ro_entry(request,
12092              ANDROID_SENSOR_EXPOSURE_TIME,
12093              &e);
12094      if (e.count > 0) {
12095          *frameDuration = e.data.i64[0];
12096      }
12097      find_camera_metadata_ro_entry(request,
12098              ANDROID_SENSOR_FRAME_DURATION,
12099              &e);
12100      if (e.count > 0) {
12101          *frameDuration = std::max(e.data.i64[0], *frameDuration);
12102      }
12103  }
12104  
12105  /*===========================================================================
12106   * FUNCTION   : calculateMaxExpectedDuration
12107   *
12108   * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
12109   *              current camera settings.
12110   *
12111   * PARAMETERS :
12112   *   @request   : request settings
12113   *
12114   * RETURN     : Expected frame duration in nanoseconds.
12115   *==========================================================================*/
calculateMaxExpectedDuration(const camera_metadata_t * request)12116  nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
12117          const camera_metadata_t *request) {
12118      nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
12119      camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12120      find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
12121      if (e.count == 0) {
12122          return maxExpectedDuration;
12123      }
12124  
12125      if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
12126          getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12127      }
12128  
12129      if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
12130          return maxExpectedDuration;
12131      }
12132  
12133      find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
12134      if (e.count == 0) {
12135          return maxExpectedDuration;
12136      }
12137  
12138      switch (e.data.u8[0]) {
12139          case ANDROID_CONTROL_AE_MODE_OFF:
12140              getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12141              break;
12142          default:
12143              find_camera_metadata_ro_entry(request,
12144                      ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
12145                      &e);
12146              if (e.count > 1) {
12147                  maxExpectedDuration = 1e9 / e.data.u8[0];
12148              }
12149              break;
12150      }
12151  
12152      return maxExpectedDuration;
12153  }
12154  
12155  /*===========================================================================
12156   * FUNCTION   : setFrameParameters
12157   *
12158   * DESCRIPTION: set parameters per frame as requested in the metadata from
12159   *              framework
12160   *
12161   * PARAMETERS :
12162   *   @request   : request that needs to be serviced
12163   *   @streamsArray : Stream ID of all the requested streams
12164   *   @blob_request: Whether this request is a blob request or not
12165   *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error code returned by
 *                       translateToHalMetadata()
12168   *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)12169  int QCamera3HardwareInterface::setFrameParameters(
12170                      camera3_capture_request_t *request,
12171                      cam_stream_ID_t streamsArray,
12172                      int blob_request,
12173                      uint32_t snapshotStreamId)
12174  {
12175      /*translate from camera_metadata_t type to parm_type_t*/
12176      int rc = 0;
12177      int32_t hal_version = CAM_HAL_V3;
12178  
12179      clear_metadata_buffer(mParameters);
12180      if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
12181          LOGE("Failed to set hal version in the parameters");
12182          return BAD_VALUE;
12183      }
12184  
12185      /*we need to update the frame number in the parameters*/
12186      if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
12187              request->frame_number)) {
12188          LOGE("Failed to set the frame number in the parameters");
12189          return BAD_VALUE;
12190      }
12191  
12192      /* Update stream id of all the requested buffers */
12193      if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
12194          LOGE("Failed to set stream type mask in the parameters");
12195          return BAD_VALUE;
12196      }
12197  
12198      if (mUpdateDebugLevel) {
12199          uint32_t dummyDebugLevel = 0;
12200          /* The value of dummyDebugLevel is irrelavent. On
12201           * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
12202          if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
12203                  dummyDebugLevel)) {
12204              LOGE("Failed to set UPDATE_DEBUG_LEVEL");
12205              return BAD_VALUE;
12206          }
12207          mUpdateDebugLevel = false;
12208      }
12209  
12210      if(request->settings != NULL){
12211          mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
12212          rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
12213          if (blob_request)
12214              memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
12215      }
12216  
12217      return rc;
12218  }
12219  
12220  /*===========================================================================
12221   * FUNCTION   : setReprocParameters
12222   *
 * DESCRIPTION: Translate framework metadata to the HAL metadata structure,
 *              and return it.
12225   *
12226   * PARAMETERS :
12227   *   @request   : request that needs to be serviced
12228   *
12229   * RETURN     : success: NO_ERROR
12230   *              failure:
12231   *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)12232  int32_t QCamera3HardwareInterface::setReprocParameters(
12233          camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
12234          uint32_t snapshotStreamId)
12235  {
12236      /*translate from camera_metadata_t type to parm_type_t*/
12237      int rc = 0;
12238  
12239      if (NULL == request->settings){
12240          LOGE("Reprocess settings cannot be NULL");
12241          return BAD_VALUE;
12242      }
12243  
12244      if (NULL == reprocParam) {
12245          LOGE("Invalid reprocessing metadata buffer");
12246          return BAD_VALUE;
12247      }
12248      clear_metadata_buffer(reprocParam);
12249  
12250      /*we need to update the frame number in the parameters*/
12251      if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
12252              request->frame_number)) {
12253          LOGE("Failed to set the frame number in the parameters");
12254          return BAD_VALUE;
12255      }
12256  
12257      rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
12258      if (rc < 0) {
12259          LOGE("Failed to translate reproc request");
12260          return rc;
12261      }
12262  
12263      CameraMetadata frame_settings;
12264      frame_settings = request->settings;
12265      if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
12266              frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
12267          int32_t *crop_count =
12268                  frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
12269          int32_t *crop_data =
12270                  frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
12271          int32_t *roi_map =
12272                  frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
12273          if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
12274              cam_crop_data_t crop_meta;
12275              memset(&crop_meta, 0, sizeof(cam_crop_data_t));
12276              crop_meta.num_of_streams = 1;
12277              crop_meta.crop_info[0].crop.left   = crop_data[0];
12278              crop_meta.crop_info[0].crop.top    = crop_data[1];
12279              crop_meta.crop_info[0].crop.width  = crop_data[2];
12280              crop_meta.crop_info[0].crop.height = crop_data[3];
12281  
12282              crop_meta.crop_info[0].roi_map.left =
12283                      roi_map[0];
12284              crop_meta.crop_info[0].roi_map.top =
12285                      roi_map[1];
12286              crop_meta.crop_info[0].roi_map.width =
12287                      roi_map[2];
12288              crop_meta.crop_info[0].roi_map.height =
12289                      roi_map[3];
12290  
12291              if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
12292                  rc = BAD_VALUE;
12293              }
12294              LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
12295                      request->input_buffer->stream,
12296                      crop_meta.crop_info[0].crop.left,
12297                      crop_meta.crop_info[0].crop.top,
12298                      crop_meta.crop_info[0].crop.width,
12299                      crop_meta.crop_info[0].crop.height);
12300              LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
12301                      request->input_buffer->stream,
12302                      crop_meta.crop_info[0].roi_map.left,
12303                      crop_meta.crop_info[0].roi_map.top,
12304                      crop_meta.crop_info[0].roi_map.width,
12305                      crop_meta.crop_info[0].roi_map.height);
12306              } else {
12307                  LOGE("Invalid reprocess crop count %d!", *crop_count);
12308              }
12309      } else {
12310          LOGE("No crop data from matching output stream");
12311      }
12312  
12313      /* These settings are not needed for regular requests so handle them specially for
12314         reprocess requests; information needed for EXIF tags */
12315      if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12316          int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12317                      (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12318          if (NAME_NOT_FOUND != val) {
12319              uint32_t flashMode = (uint32_t)val;
12320              if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
12321                  rc = BAD_VALUE;
12322              }
12323          } else {
12324              LOGE("Could not map fwk flash mode %d to correct hal flash mode",
12325                      frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12326          }
12327      } else {
12328          LOGH("No flash mode in reprocess settings");
12329      }
12330  
12331      if (frame_settings.exists(ANDROID_FLASH_STATE)) {
12332          int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
12333          if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
12334              rc = BAD_VALUE;
12335          }
12336      } else {
12337          LOGH("No flash state in reprocess settings");
12338      }
12339  
12340      if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
12341          uint8_t *reprocessFlags =
12342              frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
12343          if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
12344                  *reprocessFlags)) {
12345                  rc = BAD_VALUE;
12346          }
12347      }
12348  
12349      // Add exif debug data to internal metadata
12350      if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
12351          mm_jpeg_debug_exif_params_t *debug_params =
12352                  (mm_jpeg_debug_exif_params_t *)frame_settings.find
12353                  (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12354          // AE
12355          if (debug_params->ae_debug_params_valid == TRUE) {
12356              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12357                      debug_params->ae_debug_params);
12358          }
12359          // AWB
12360          if (debug_params->awb_debug_params_valid == TRUE) {
12361              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12362                  debug_params->awb_debug_params);
12363          }
12364          // AF
12365         if (debug_params->af_debug_params_valid == TRUE) {
12366              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12367                     debug_params->af_debug_params);
12368          }
12369          // ASD
12370          if (debug_params->asd_debug_params_valid == TRUE) {
12371              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12372                      debug_params->asd_debug_params);
12373          }
12374          // Stats
12375          if (debug_params->stats_debug_params_valid == TRUE) {
12376              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12377                      debug_params->stats_debug_params);
12378         }
12379          // BE Stats
12380          if (debug_params->bestats_debug_params_valid == TRUE) {
12381              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12382                      debug_params->bestats_debug_params);
12383          }
12384          // BHIST
12385          if (debug_params->bhist_debug_params_valid == TRUE) {
12386              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12387                      debug_params->bhist_debug_params);
12388         }
12389          // 3A Tuning
12390          if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12391              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12392                      debug_params->q3a_tuning_debug_params);
12393          }
12394      }
12395  
12396      // Add metadata which reprocess needs
12397      if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12398          cam_reprocess_info_t *repro_info =
12399                  (cam_reprocess_info_t *)frame_settings.find
12400                  (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
12401          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
12402                  repro_info->sensor_crop_info);
12403          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
12404                  repro_info->camif_crop_info);
12405          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
12406                  repro_info->isp_crop_info);
12407          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
12408                  repro_info->cpp_crop_info);
12409          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
12410                  repro_info->af_focal_length_ratio);
12411          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
12412                  repro_info->pipeline_flip);
12413          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12414                  repro_info->af_roi);
12415          ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12416                  repro_info->dyn_mask);
12417          /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
12418             CAM_INTF_PARM_ROTATION metadata then has been added in
12419             translateToHalMetadata. HAL need to keep this new rotation
12420             metadata. Otherwise, the old rotation info saved in the vendor tag
12421             would be used */
12422          IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12423                  CAM_INTF_PARM_ROTATION, reprocParam) {
12424              LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12425          } else {
12426              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
12427                      repro_info->rotation_info);
12428          }
12429      }
12430  
12431      /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
12432         to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
12433         roi.width and roi.height would be the final JPEG size.
12434         For now, HAL only checks this for reprocess request */
12435      if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12436              frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12437          uint8_t *enable =
12438              frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12439          if (*enable == TRUE) {
12440              int32_t *crop_data =
12441                      frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12442              cam_stream_crop_info_t crop_meta;
12443              memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12444              crop_meta.stream_id = 0;
12445              crop_meta.crop.left   = crop_data[0];
12446              crop_meta.crop.top    = crop_data[1];
12447              crop_meta.crop.width  = crop_data[2];
12448              crop_meta.crop.height = crop_data[3];
12449              // The JPEG crop roi should match cpp output size
12450              IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12451                      CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12452                  crop_meta.roi_map.left = 0;
12453                  crop_meta.roi_map.top = 0;
12454                  crop_meta.roi_map.width = cpp_crop->crop.width;
12455                  crop_meta.roi_map.height = cpp_crop->crop.height;
12456              }
12457              ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12458                      crop_meta);
12459              LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
12460                      crop_meta.crop.left, crop_meta.crop.top,
12461                      crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12462              LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
12463                      crop_meta.roi_map.left, crop_meta.roi_map.top,
12464                      crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12465  
12466              // Add JPEG scale information
12467              cam_dimension_t scale_dim;
12468              memset(&scale_dim, 0, sizeof(cam_dimension_t));
12469              if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12470                  int32_t *roi =
12471                      frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12472                  scale_dim.width = roi[2];
12473                  scale_dim.height = roi[3];
12474                  ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12475                      scale_dim);
12476                  LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12477                      scale_dim.width, scale_dim.height, mCameraId);
12478              }
12479          }
12480      }
12481  
12482      return rc;
12483  }
12484  
12485  /*===========================================================================
12486   * FUNCTION   : saveRequestSettings
12487   *
12488   * DESCRIPTION: Add any settings that might have changed to the request settings
12489   *              and save the settings to be applied on the frame
12490   *
12491   * PARAMETERS :
12492   *   @jpegMetadata : the extracted and/or modified jpeg metadata
12493   *   @request      : request with initial settings
12494   *
12495   * RETURN     :
12496   * camera_metadata_t* : pointer to the saved request settings
12497   *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)12498  camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12499          const CameraMetadata &jpegMetadata,
12500          camera3_capture_request_t *request)
12501  {
12502      camera_metadata_t *resultMetadata;
12503      CameraMetadata camMetadata;
12504      camMetadata = request->settings;
12505  
12506      if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12507          int32_t thumbnail_size[2];
12508          thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12509          thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12510          camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12511                  jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12512      }
12513  
12514      if (request->input_buffer != NULL) {
12515          uint8_t reprocessFlags = 1;
12516          camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12517                  (uint8_t*)&reprocessFlags,
12518                  sizeof(reprocessFlags));
12519      }
12520  
12521      resultMetadata = camMetadata.release();
12522      return resultMetadata;
12523  }
12524  
12525  /*===========================================================================
12526   * FUNCTION   : setHalFpsRange
12527   *
12528   * DESCRIPTION: set FPS range parameter
12529   *
12530   *
12531   * PARAMETERS :
12532   *   @settings    : Metadata from framework
12533   *   @hal_metadata: Metadata buffer
12534   *
12535   *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
12538   *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): find() is used without an exists() guard here — the call
    // site checks ANDROID_CONTROL_AE_TARGET_FPS_RANGE before calling.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video fps range mirrors the AE target range; it is
    // overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Recomputed on every call; stays 0 unless HFR batching is engaged below.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Constrained HFR: run the sensor flat-out at the max fps (see the
        // table above) — both min and video_min are pinned to video_max.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to the backend's HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size yields the preview fps target, capped at the
                // maximum batch the HAL supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
12632  
12633  /*===========================================================================
12634   * FUNCTION   : translateToHalMetadata
12635   *
12636   * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12637   *
12638   *
 * PARAMETERS :
 *   @request          : request sent from framework
 *   @hal_metadata     : HAL metadata buffer to populate with translated settings
 *   @snapshotStreamId : stream id of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
12645   *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)12646  int QCamera3HardwareInterface::translateToHalMetadata
12647                                    (const camera3_capture_request_t *request,
12648                                     metadata_buffer_t *hal_metadata,
12649                                     uint32_t snapshotStreamId) {
12650      if (request == nullptr || hal_metadata == nullptr) {
12651          return BAD_VALUE;
12652      }
12653  
12654      int64_t minFrameDuration = getMinFrameDuration(request);
12655  
12656      return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12657              minFrameDuration);
12658  }
12659  
translateFwkMetadataToHalMetadata(const camera_metadata_t * frameworkMetadata,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId,int64_t minFrameDuration)12660  int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12661          const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12662          uint32_t snapshotStreamId, int64_t minFrameDuration) {
12663  
12664      int rc = 0;
12665      CameraMetadata frame_settings;
12666      frame_settings = frameworkMetadata;
12667  
12668      /* Do not change the order of the following list unless you know what you are
12669       * doing.
12670       * The order is laid out in such a way that parameters in the front of the table
12671       * may be used to override the parameters later in the table. Examples are:
12672       * 1. META_MODE should precede AEC/AWB/AF MODE
12673       * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12674       * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12675       * 4. Any mode should precede it's corresponding settings
12676       */
12677      if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12678          uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12679          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12680              rc = BAD_VALUE;
12681          }
12682          rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12683          if (rc != NO_ERROR) {
12684              LOGE("extractSceneMode failed");
12685          }
12686      }
12687  
12688      if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12689          uint8_t fwk_aeMode =
12690              frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12691          uint8_t aeMode;
12692          int32_t redeye;
12693  
12694          if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12695              aeMode = CAM_AE_MODE_OFF;
12696          } else if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH) {
12697              aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
12698          } else {
12699              aeMode = CAM_AE_MODE_ON;
12700          }
12701          if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12702              redeye = 1;
12703          } else {
12704              redeye = 0;
12705          }
12706  
12707          int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12708                  fwk_aeMode);
12709          if (NAME_NOT_FOUND != val) {
12710              int32_t flashMode = (int32_t)val;
12711              ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12712          }
12713  
12714          ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12715          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12716              rc = BAD_VALUE;
12717          }
12718      }
12719  
12720      if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12721          uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12722          int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12723                  fwk_whiteLevel);
12724          if (NAME_NOT_FOUND != val) {
12725              uint8_t whiteLevel = (uint8_t)val;
12726              if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12727                  rc = BAD_VALUE;
12728              }
12729          }
12730      }
12731  
12732      if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12733          uint8_t fwk_cacMode =
12734                  frame_settings.find(
12735                          ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12736          int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12737                  fwk_cacMode);
12738          if (NAME_NOT_FOUND != val) {
12739              cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12740              bool entryAvailable = FALSE;
12741              // Check whether Frameworks set CAC mode is supported in device or not
12742              for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12743                  if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12744                      entryAvailable = TRUE;
12745                      break;
12746                  }
12747              }
12748              LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12749              // If entry not found then set the device supported mode instead of frameworks mode i.e,
12750              // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12751              // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12752              if (entryAvailable == FALSE) {
12753                  if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12754                      cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12755                  } else {
12756                      if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12757                          // High is not supported and so set the FAST as spec say's underlying
12758                          // device implementation can be the same for both modes.
12759                          cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12760                      } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12761                          // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12762                          // in order to avoid the fps drop due to high quality
12763                          cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12764                      } else {
12765                          cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12766                      }
12767                  }
12768              }
12769              LOGD("Final cacMode is %d", cacMode);
12770              if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12771                  rc = BAD_VALUE;
12772              }
12773          } else {
12774              LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12775          }
12776      }
12777  
12778      uint8_t fwk_focusMode = 0;
12779      if (m_bForceInfinityAf == 0) {
12780          if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
12781              fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
12782              int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12783                      fwk_focusMode);
12784              if (NAME_NOT_FOUND != val) {
12785                  uint8_t focusMode = (uint8_t)val;
12786                  LOGD("set focus mode %d", focusMode);
12787                  if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12788                           CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12789                      rc = BAD_VALUE;
12790                  }
12791              }
12792          } else {
12793              LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
12794          }
12795      } else {
12796          uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12797          LOGE("Focus forced to infinity %d", focusMode);
12798          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12799              rc = BAD_VALUE;
12800          }
12801      }
12802  
12803      if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12804              fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
12805          float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12806          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12807                  focalDistance)) {
12808              rc = BAD_VALUE;
12809          }
12810      }
12811  
12812      if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12813          uint8_t fwk_antibandingMode =
12814                  frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12815          int val = lookupHalName(ANTIBANDING_MODES_MAP,
12816                  METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12817          if (NAME_NOT_FOUND != val) {
12818              uint32_t hal_antibandingMode = (uint32_t)val;
12819              if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12820                  if (m60HzZone) {
12821                      hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12822                  } else {
12823                      hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12824                  }
12825              }
12826              if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12827                      hal_antibandingMode)) {
12828                  rc = BAD_VALUE;
12829              }
12830          }
12831      }
12832  
12833      if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12834          int32_t expCompensation = frame_settings.find(
12835                  ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12836          if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12837              expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12838          if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12839              expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
12840          LOGD("Setting compensation:%d", expCompensation);
12841          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12842                  expCompensation)) {
12843              rc = BAD_VALUE;
12844          }
12845      }
12846  
12847      if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12848          uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12849          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12850              rc = BAD_VALUE;
12851          }
12852      }
    // AE target FPS range: parsing/validation is delegated to setHalFpsRange().
    // Throughout this translator, failures set rc and translation continues
    // (accumulate-errors pattern) rather than returning early.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }

    // AWB lock: framework byte is forwarded to the HAL parameter as-is.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }

    // Color effect: framework enum is mapped to the HAL enum via
    // EFFECT_MODES_MAP; unknown values are silently dropped (no rc change).
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction gains: copies CC_GAIN_MAX floats from the framework
    // entry. NOTE(review): assumes the entry holds at least CC_GAIN_MAX
    // values — no count check is performed here; confirm against callers.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAIN_MAX; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction transform: copies the rational matrix row-major
    // (num walks the flat framework array while i/j index the HAL matrix).
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }

    // AE precapture trigger: defaults to IDLE / id -1; only forwarded when
    // BOTH the trigger and its id are present in the request.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }
12950  
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        // Byte tag widened into int32 before batching.
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge (sharpening) mode: OFF forces sharpness 0; otherwise start from
    // the capability default and allow the vendor sharpness-strength tag to
    // override it when it falls inside the advertised [min, max] range.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];

        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness =
                    gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
                int32_t sharpness =
                        frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
                if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
                    sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
                    LOGD("Setting edge mode sharpness %d", sharpness);
                    edge_application.sharpness = sharpness;
                }
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }

    // Flash mode: always forwarded as CAM_INTF_META_FLASH_MODE, but the LED
    // mode parameter is only set when AE is NOT in one of the flash-managing
    // AE modes (per android.flash.mode semantics, AE owns the flash then).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        uint32_t flashMode = (uint32_t)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_MODE, flashMode)) {
            rc = BAD_VALUE;
        }

        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t ledMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, ledMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    // NOTE(review): ANDROID_FLASH_STATE is a byte tag in the framework
    // metadata, but it is read here through data.i32[0] — confirm this is
    // intentional (it may misread the union on requests carrying this tag).
    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_STATE, flashState)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }
13041  
    // Lens parameters: each is a straight float pass-through to the HAL batch.
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    // NOTE(review): video stabilization is written into mParameters, not the
    // per-request hal_metadata like its neighbors — presumably so it takes
    // effect at the session level; confirm this asymmetry is intentional.
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }


    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }

    // Scaler crop region: converted from active-array coordinates to sensor
    // output coordinates. scalerCropSet is consumed further down by the
    // AE/AF region handling to clip ROIs against this crop.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration is clamped to [minFrameDuration, capability max].
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensitivity (ISO) is clamped to the advertised sensitivity range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }
13157  
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost (HAL >= 3.4 only): clamp the framework
    // value to the ISP sensitivity range before batching.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }
#endif
13180  
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detect mode: mapped through FACEDETECT_MODES_MAP; unmapped values
    // are silently dropped.
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        // Framework curves are flat (in, out) float pairs, so the point
        // count is entry count / 2; clamp to the HAL's fixed-size buffer.
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
13284  
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE regions: translated to sensor coordinates, then (if a scaler crop
    // was supplied earlier in this request) validated against the crop;
    // resetIfNeededROI returning false suppresses the batch write.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF regions: same treatment as AE regions above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }
13343  
    // CDS for non-HFR non-video mode
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Video HDR
    // The session flag m_bVideoHdrEnabled forces HDR on regardless of the
    // per-request vendor tag value.
    cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
    if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
        vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
    }
    if (m_bVideoHdrEnabled)
        vhdr = CAM_VIDEO_HDR_MODE_ON;

    int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);

    if(vhdr != curr_hdr_state)
        LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);

    // NOTE(review): this assignment unconditionally overwrites rc, so a
    // BAD_VALUE recorded by any earlier block is lost if setVideoHdrMode
    // succeeds — confirm whether that error-swallowing is intended.
    rc = setVideoHdrMode(mParameters, vhdr);
    if (rc != NO_ERROR) {
        LOGE("setVideoHDR is failed");
    }

    //IR
    if(frame_settings.exists(QCAMERA3_IR_MODE)) {
        cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
                frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
        uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
        uint8_t isIRon = 0;

        (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
        if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
            LOGE("Invalid IR mode %d!", fwk_ir);
        } else {
            if(isIRon != curr_ir_state )
               LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_IR_MODE, fwk_ir)) {
                rc = BAD_VALUE;
            }
        }
    }

    //Binning Correction Mode
    if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
        cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
                frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
        if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
                || (0 > fwk_binning_correction)) {
            LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
                rc = BAD_VALUE;
            }
        }
    }

    // AEC/AWB convergence speeds: negative values are rejected with a log.
    if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
        float aec_speed;
        aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
        LOGD("AEC Speed :%f", aec_speed);
        if ( aec_speed < 0 ) {
            LOGE("Invalid AEC mode %f!", aec_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
                    aec_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
        float awb_speed;
        awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
        LOGD("AWB Speed :%f", awb_speed);
        if ( awb_speed < 0 ) {
            LOGE("Invalid AWB mode %f!", awb_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
                    awb_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    // TNR
    // NOTE(review): like video stabilization, TNR is batched into the
    // session-level mParameters rather than hal_metadata — confirm intended.
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        uint8_t b_TnrRequested = 0;
        uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;

        if(b_TnrRequested != curr_tnr_state)
           LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }
13459  
    if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
        int32_t* exposure_metering_mode =
                frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
                *exposure_metering_mode)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor test pattern: mode is mapped via TEST_PATTERN_MAP. For
    // SOLID_COLOR, the four framework channel values [R, Gr, Gb, B] are
    // reordered to match the sensor's Bayer color arrangement (the two green
    // channels swap between RGGB/GRBG and GBRG/BGGR layouts).
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        LOGE("color arrangement %d is not supported",
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework sensor test pattern mode %d",
                    fwk_testPatternMode);
        }
    }
13511  
    // JPEG GPS coordinates: the array macro reports how many elements were
    // actually copied in `count`; a short copy is treated as an error.
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // GPS processing method: copied into a fixed, NUL-padded local buffer.
    // NOTE(review): strlcpy assumes the framework byte array is
    // NUL-terminated within its count — confirm against the metadata source.
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }
13543  
13544      if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13545          int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13546          cam_rotation_info_t rotation_info;
13547          if (orientation == 0) {
13548             rotation_info.rotation = ROTATE_0;
13549          } else if (orientation == 90) {
13550             rotation_info.rotation = ROTATE_90;
13551          } else if (orientation == 180) {
13552             rotation_info.rotation = ROTATE_180;
13553          } else if (orientation == 270) {
13554             rotation_info.rotation = ROTATE_270;
13555          }
13556          rotation_info.device_rotation = ROTATE_0;
13557          rotation_info.streamId = snapshotStreamId;
13558          ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13559          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13560              rc = BAD_VALUE;
13561          }
13562      }
13563  
13564      if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13565          uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13566          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13567              rc = BAD_VALUE;
13568          }
13569      }
13570  
13571      if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13572          uint32_t thumb_quality = (uint32_t)
13573                  frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13574          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13575                  thumb_quality)) {
13576              rc = BAD_VALUE;
13577          }
13578      }
13579  
13580      if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13581          cam_dimension_t dim;
13582          dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13583          dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13584          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13585              rc = BAD_VALUE;
13586          }
13587      }
13588  
13589      // Internal metadata
13590      if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13591          size_t count = 0;
13592          camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13593          ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13594                  privatedata.data.i32, privatedata.count, count);
13595          if (privatedata.count != count) {
13596              rc = BAD_VALUE;
13597          }
13598      }
13599  
13600      // ISO/Exposure Priority
13601      if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13602          frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13603          cam_priority_mode_t mode =
13604                  (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13605          if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13606              cam_intf_parm_manual_3a_t use_iso_exp_pty;
13607              use_iso_exp_pty.previewOnly = FALSE;
13608              uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13609              use_iso_exp_pty.value = *ptr;
13610  
13611              if(CAM_ISO_PRIORITY == mode) {
13612                  if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13613                          use_iso_exp_pty)) {
13614                      rc = BAD_VALUE;
13615                  }
13616              }
13617              else {
13618                  if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13619                          use_iso_exp_pty)) {
13620                      rc = BAD_VALUE;
13621                  }
13622              }
13623  
13624              if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13625                      rc = BAD_VALUE;
13626              }
13627          }
13628      } else {
13629          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13630              rc = BAD_VALUE;
13631          }
13632      }
13633  
13634      // Saturation
13635      if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13636          int32_t* use_saturation =
13637                  frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13638          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13639              rc = BAD_VALUE;
13640          }
13641      }
13642  
13643      // EV step
13644      if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13645              gCamCapability[mCameraId]->exp_compensation_step)) {
13646          rc = BAD_VALUE;
13647      }
13648  
13649      // CDS info
13650      if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13651          cam_cds_data_t *cdsData = (cam_cds_data_t *)
13652                  frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13653  
13654          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13655                  CAM_INTF_META_CDS_DATA, *cdsData)) {
13656              rc = BAD_VALUE;
13657          }
13658      }
13659  
13660      // Hybrid AE
13661      if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13662          uint8_t *hybrid_ae = (uint8_t *)
13663                  frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13664          // Motion tracking intent isn't compatible with hybrid ae.
13665          if (mCaptureIntent == CAM_INTENT_MOTION_TRACKING) {
13666              *hybrid_ae = 0;
13667          }
13668          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13669              rc = BAD_VALUE;
13670          }
13671      }
13672  
13673      // Motion Detection
13674      if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
13675          uint8_t *motion_detection = (uint8_t *)
13676                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8;
13677          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MOTION_DETECTION_ENABLE, *motion_detection)) {
13678              rc = BAD_VALUE;
13679          }
13680      }
13681  
13682      // Histogram
13683      if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13684          uint8_t histogramMode =
13685                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13686          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13687                  histogramMode)) {
13688              rc = BAD_VALUE;
13689          }
13690      }
13691  
13692      if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13693          int32_t histogramBins =
13694                   frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13695          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13696                  histogramBins)) {
13697              rc = BAD_VALUE;
13698          }
13699      }
13700  
13701      // Tracking AF
13702      if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13703          uint8_t trackingAfTrigger =
13704                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13705          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13706                  trackingAfTrigger)) {
13707              rc = BAD_VALUE;
13708          }
13709      }
13710  
13711      // Makernote
13712      camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13713      if (entry.count != 0) {
13714          if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13715              cam_makernote_t makernote;
13716              makernote.length = entry.count;
13717              memcpy(makernote.data, entry.data.u8, makernote.length);
13718              if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13719                  rc = BAD_VALUE;
13720              }
13721          } else {
13722              ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13723                      MAX_MAKERNOTE_LENGTH);
13724              rc = BAD_VALUE;
13725          }
13726      }
13727  
13728      return rc;
13729  }
13730  
13731  /*===========================================================================
13732   * FUNCTION   : captureResultCb
13733   *
13734   * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13735   *
13736   * PARAMETERS :
13737   *   @frame  : frame information from mm-camera-interface
13738   *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13739   *   @userdata: userdata
13740   *
13741   * RETURN     : NONE
13742   *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)13743  void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13744                  camera3_stream_buffer_t *buffer,
13745                  uint32_t frame_number, bool isInputBuffer, void *userdata)
13746  {
13747      QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13748      if (hw == NULL) {
13749          LOGE("Invalid hw %p", hw);
13750          return;
13751      }
13752  
13753      hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13754      return;
13755  }
13756  
13757  /*===========================================================================
13758   * FUNCTION   : setBufferErrorStatus
13759   *
13760   * DESCRIPTION: Callback handler for channels to report any buffer errors
13761   *
13762   * PARAMETERS :
13763   *   @ch     : Channel on which buffer error is reported from
13764   *   @frame_number  : frame number on which buffer error is reported on
13765   *   @buffer_status : buffer error status
13766   *   @userdata: userdata
13767   *
13768   * RETURN     : NONE
13769   *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)13770  void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13771          uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13772  {
13773      QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13774      if (hw == NULL) {
13775          LOGE("Invalid hw %p", hw);
13776          return;
13777      }
13778  
13779      hw->setBufferErrorStatus(ch, frame_number, err);
13780      return;
13781  }
13782  
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)13783  void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13784          uint32_t frameNumber, camera3_buffer_status_t err)
13785  {
13786      LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13787      pthread_mutex_lock(&mMutex);
13788  
13789      for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13790          if (req.frame_number != frameNumber)
13791              continue;
13792          for (auto& k : req.mPendingBufferList) {
13793              if(k.stream->priv == ch) {
13794                  k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13795              }
13796          }
13797      }
13798  
13799      pthread_mutex_unlock(&mMutex);
13800      return;
13801  }
13802  /*===========================================================================
13803   * FUNCTION   : initialize
13804   *
13805   * DESCRIPTION: Pass framework callback pointers to HAL
13806   *
13807   * PARAMETERS :
13808   *
13809   *
13810   * RETURN     : Success : 0
13811   *              Failure: -ENODEV
13812   *==========================================================================*/
13813  
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)13814  int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13815                                    const camera3_callback_ops_t *callback_ops)
13816  {
13817      LOGD("E");
13818      QCamera3HardwareInterface *hw =
13819          reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13820      if (!hw) {
13821          LOGE("NULL camera device");
13822          return -ENODEV;
13823      }
13824  
13825      int rc = hw->initialize(callback_ops);
13826      LOGD("X");
13827      return rc;
13828  }
13829  
13830  /*===========================================================================
13831   * FUNCTION   : configure_streams
13832   *
13833   * DESCRIPTION:
13834   *
13835   * PARAMETERS :
13836   *
13837   *
13838   * RETURN     : Success: 0
13839   *              Failure: -EINVAL (if stream configuration is invalid)
13840   *                       -ENODEV (fatal error)
13841   *==========================================================================*/
13842  
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)13843  int QCamera3HardwareInterface::configure_streams(
13844          const struct camera3_device *device,
13845          camera3_stream_configuration_t *stream_list)
13846  {
13847      LOGD("E");
13848      QCamera3HardwareInterface *hw =
13849          reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13850      if (!hw) {
13851          LOGE("NULL camera device");
13852          return -ENODEV;
13853      }
13854      int rc = hw->configureStreams(stream_list);
13855      LOGD("X");
13856      return rc;
13857  }
13858  
13859  /*===========================================================================
13860   * FUNCTION   : construct_default_request_settings
13861   *
13862   * DESCRIPTION: Configure a settings buffer to meet the required use case
13863   *
13864   * PARAMETERS :
13865   *
13866   *
13867   * RETURN     : Success: Return valid metadata
13868   *              Failure: Return NULL
13869   *==========================================================================*/
13870  const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)13871      construct_default_request_settings(const struct camera3_device *device,
13872                                          int type)
13873  {
13874  
13875      LOGD("E");
13876      camera_metadata_t* fwk_metadata = NULL;
13877      QCamera3HardwareInterface *hw =
13878          reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13879      if (!hw) {
13880          LOGE("NULL camera device");
13881          return NULL;
13882      }
13883  
13884      fwk_metadata = hw->translateCapabilityToMetadata(type);
13885  
13886      LOGD("X");
13887      return fwk_metadata;
13888  }
13889  
13890  /*===========================================================================
13891   * FUNCTION   : process_capture_request
13892   *
13893   * DESCRIPTION:
13894   *
13895   * PARAMETERS :
13896   *
13897   *
13898   * RETURN     :
13899   *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)13900  int QCamera3HardwareInterface::process_capture_request(
13901                      const struct camera3_device *device,
13902                      camera3_capture_request_t *request)
13903  {
13904      LOGD("E");
13905      CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
13906      QCamera3HardwareInterface *hw =
13907          reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13908      if (!hw) {
13909          LOGE("NULL camera device");
13910          return -EINVAL;
13911      }
13912  
13913      int rc = hw->orchestrateRequest(request);
13914      LOGD("X");
13915      return rc;
13916  }
13917  
13918  /*===========================================================================
13919   * FUNCTION   : dump
13920   *
13921   * DESCRIPTION:
13922   *
13923   * PARAMETERS :
13924   *
13925   *
13926   * RETURN     :
13927   *==========================================================================*/
13928  
dump(const struct camera3_device * device,int fd)13929  void QCamera3HardwareInterface::dump(
13930                  const struct camera3_device *device, int fd)
13931  {
13932      /* Log level property is read when "adb shell dumpsys media.camera" is
13933         called so that the log level can be controlled without restarting
13934         the media server */
13935      getLogLevel();
13936  
13937      LOGD("E");
13938      QCamera3HardwareInterface *hw =
13939          reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13940      if (!hw) {
13941          LOGE("NULL camera device");
13942          return;
13943      }
13944  
13945      hw->dump(fd);
13946      LOGD("X");
13947      return;
13948  }
13949  
13950  /*===========================================================================
13951   * FUNCTION   : flush
13952   *
13953   * DESCRIPTION:
13954   *
13955   * PARAMETERS :
13956   *
13957   *
13958   * RETURN     :
13959   *==========================================================================*/
13960  
flush(const struct camera3_device * device)13961  int QCamera3HardwareInterface::flush(
13962                  const struct camera3_device *device)
13963  {
13964      int rc;
13965      LOGD("E");
13966      QCamera3HardwareInterface *hw =
13967          reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13968      if (!hw) {
13969          LOGE("NULL camera device");
13970          return -EINVAL;
13971      }
13972  
13973      pthread_mutex_lock(&hw->mMutex);
13974      // Validate current state
13975      switch (hw->mState) {
13976          case STARTED:
13977              /* valid state */
13978              break;
13979  
13980          case ERROR:
13981              pthread_mutex_unlock(&hw->mMutex);
13982              hw->handleCameraDeviceError();
13983              return -ENODEV;
13984  
13985          default:
13986              LOGI("Flush returned during state %d", hw->mState);
13987              pthread_mutex_unlock(&hw->mMutex);
13988              return 0;
13989      }
13990      pthread_mutex_unlock(&hw->mMutex);
13991  
13992      rc = hw->flush(true /* restart channels */ );
13993      LOGD("X");
13994      return rc;
13995  }
13996  
13997  /*===========================================================================
13998   * FUNCTION   : close_camera_device
13999   *
14000   * DESCRIPTION:
14001   *
14002   * PARAMETERS :
14003   *
14004   *
14005   * RETURN     :
14006   *==========================================================================*/
close_camera_device(struct hw_device_t * device)14007  int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
14008  {
14009      int ret = NO_ERROR;
14010      QCamera3HardwareInterface *hw =
14011          reinterpret_cast<QCamera3HardwareInterface *>(
14012              reinterpret_cast<camera3_device_t *>(device)->priv);
14013      if (!hw) {
14014          LOGE("NULL camera device");
14015          return BAD_VALUE;
14016      }
14017  
14018      LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
14019      delete hw;
14020      LOGI("[KPI Perf]: X");
14021      CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
14022      return ret;
14023  }
14024  
14025  /*===========================================================================
14026   * FUNCTION   : getWaveletDenoiseProcessPlate
14027   *
14028   * DESCRIPTION: query wavelet denoise process plate
14029   *
14030   * PARAMETERS : None
14031   *
14032   * RETURN     : WNR prcocess plate value
14033   *==========================================================================*/
getWaveletDenoiseProcessPlate()14034  cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
14035  {
14036      char prop[PROPERTY_VALUE_MAX];
14037      memset(prop, 0, sizeof(prop));
14038      property_get("persist.denoise.process.plates", prop, "0");
14039      int processPlate = atoi(prop);
14040      switch(processPlate) {
14041      case 0:
14042          return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14043      case 1:
14044          return CAM_WAVELET_DENOISE_CBCR_ONLY;
14045      case 2:
14046          return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14047      case 3:
14048          return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14049      default:
14050          return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14051      }
14052  }
14053  
14054  
14055  /*===========================================================================
14056   * FUNCTION   : getTemporalDenoiseProcessPlate
14057   *
14058   * DESCRIPTION: query temporal denoise process plate
14059   *
14060   * PARAMETERS : None
14061   *
14062   * RETURN     : TNR prcocess plate value
14063   *==========================================================================*/
getTemporalDenoiseProcessPlate()14064  cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
14065  {
14066      char prop[PROPERTY_VALUE_MAX];
14067      memset(prop, 0, sizeof(prop));
14068      property_get("persist.tnr.process.plates", prop, "0");
14069      int processPlate = atoi(prop);
14070      switch(processPlate) {
14071      case 0:
14072          return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14073      case 1:
14074          return CAM_WAVELET_DENOISE_CBCR_ONLY;
14075      case 2:
14076          return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14077      case 3:
14078          return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14079      default:
14080          return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14081      }
14082  }
14083  
14084  
14085  /*===========================================================================
14086   * FUNCTION   : extractSceneMode
14087   *
14088   * DESCRIPTION: Extract scene mode from frameworks set metadata
14089   *
14090   * PARAMETERS :
14091   *      @frame_settings: CameraMetadata reference
14092   *      @metaMode: ANDROID_CONTORL_MODE
14093   *      @hal_metadata: hal metadata structure
14094   *
14095   * RETURN     : None
14096   *==========================================================================*/
int32_t QCamera3HardwareInterface::extractSceneMode(
        const CameraMetadata &frame_settings, uint8_t metaMode,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    uint8_t sceneMode = CAM_SCENE_MODE_OFF;

    // OFF_KEEP_STATE: framework asks the HAL to leave 3A state untouched,
    // so no scene-mode parameters are pushed at all.
    if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
        LOGD("Ignoring control mode OFF_KEEP_STATE");
        return NO_ERROR;
    }

    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        camera_metadata_ro_entry entry =
                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
        // No scene-mode tag in the request: nothing to translate.
        if (0 == entry.count)
            return rc;

        uint8_t fwk_sceneMode = entry.data.u8[0];

        // Map the framework scene-mode enum to the HAL-side value; unmapped
        // modes silently keep CAM_SCENE_MODE_OFF.
        int val = lookupHalName(SCENE_MODES_MAP,
                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                fwk_sceneMode);
        if (NAME_NOT_FOUND != val) {
            sceneMode = (uint8_t)val;
            LOGD("sceneMode: %d", sceneMode);
        }
    }

    // Enable sensor HDR for the HDR scene mode; also call through when
    // sensor HDR is currently enabled so it can be switched back off.
    if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
        rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
    }

    if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
        // NOTE(review): sceneMode holds a HAL-side cam_scene_mode value here,
        // yet it is compared against the framework constant
        // ANDROID_CONTROL_SCENE_MODE_HDR. This only behaves as intended if
        // the two enum values coincide -- confirm against CAM_SCENE_MODE_HDR.
        if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
            // Multiframe (bracketed) HDR without an extra 1x frame.
            cam_hdr_param_t hdr_params;
            hdr_params.hdr_enable = 1;
            hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
            hdr_params.hdr_need_1x = false;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
                rc = BAD_VALUE;
            }
        }

        // Push the (possibly OFF) bestshot mode only when sensor HDR is not
        // active, since sensor HDR owns the scene configuration then.
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
            rc = BAD_VALUE;
        }
    }

    // Debug/override path: force multiframe HDR bracketing regardless of the
    // requested scene mode.
    if (mForceHdrSnapshot) {
        cam_hdr_param_t hdr_params;
        hdr_params.hdr_enable = 1;
        hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
        hdr_params.hdr_need_1x = false;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
            rc = BAD_VALUE;
        }
    }

    return rc;
}
14161  
14162  /*===========================================================================
14163   * FUNCTION   : setVideoHdrMode
14164   *
14165   * DESCRIPTION: Set Video HDR mode from frameworks set metadata
14166   *
14167   * PARAMETERS :
14168   *      @hal_metadata: hal metadata structure
14169   *      @metaMode: QCAMERA3_VIDEO_HDR_MODE
14170   *
14171   * RETURN     : None
14172   *==========================================================================*/
setVideoHdrMode(metadata_buffer_t * hal_metadata,cam_video_hdr_mode_t vhdr)14173  int32_t QCamera3HardwareInterface::setVideoHdrMode(
14174          metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
14175  {
14176      if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
14177          return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
14178      }
14179  
14180      LOGE("Invalid Video HDR mode %d!", vhdr);
14181      return BAD_VALUE;
14182  }
14183  
14184  /*===========================================================================
14185   * FUNCTION   : setSensorHDR
14186   *
14187   * DESCRIPTION: Enable/disable sensor HDR.
14188   *
14189   * PARAMETERS :
14190   *      @hal_metadata: hal metadata structure
14191   *      @enable: boolean whether to enable/disable sensor HDR
14192   *
14193   * RETURN     : None
14194   *==========================================================================*/
setSensorHDR(metadata_buffer_t * hal_metadata,bool enable,bool isVideoHdrEnable)14195  int32_t QCamera3HardwareInterface::setSensorHDR(
14196          metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
14197  {
14198      int32_t rc = NO_ERROR;
14199      cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
14200  
14201      if (enable) {
14202          char sensor_hdr_prop[PROPERTY_VALUE_MAX];
14203          memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
14204          #ifdef _LE_CAMERA_
14205          //Default to staggered HDR for IOT
14206          property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
14207          #else
14208          property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
14209          #endif
14210          sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
14211      }
14212  
14213      bool isSupported = false;
14214      switch (sensor_hdr) {
14215          case CAM_SENSOR_HDR_IN_SENSOR:
14216              if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14217                      CAM_QCOM_FEATURE_SENSOR_HDR) {
14218                  isSupported = true;
14219                  LOGD("Setting HDR mode In Sensor");
14220              }
14221              break;
14222          case CAM_SENSOR_HDR_ZIGZAG:
14223              if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14224                      CAM_QCOM_FEATURE_ZIGZAG_HDR) {
14225                  isSupported = true;
14226                  LOGD("Setting HDR mode Zigzag");
14227              }
14228              break;
14229          case CAM_SENSOR_HDR_STAGGERED:
14230              if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14231                      CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
14232                  isSupported = true;
14233                  LOGD("Setting HDR mode Staggered");
14234              }
14235              break;
14236          case CAM_SENSOR_HDR_OFF:
14237              isSupported = true;
14238              LOGD("Turning off sensor HDR");
14239              break;
14240          default:
14241              LOGE("HDR mode %d not supported", sensor_hdr);
14242              rc = BAD_VALUE;
14243              break;
14244      }
14245  
14246      if(isSupported) {
14247          if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14248                  CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
14249              rc = BAD_VALUE;
14250          } else {
14251              if(!isVideoHdrEnable)
14252                  m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
14253          }
14254      }
14255      return rc;
14256  }
14257  
14258  /*===========================================================================
14259   * FUNCTION   : needRotationReprocess
14260   *
14261   * DESCRIPTION: if rotation needs to be done by reprocess in pp
14262   *
14263   * PARAMETERS : none
14264   *
14265   * RETURN     : true: needed
14266   *              false: no need
14267   *==========================================================================*/
needRotationReprocess()14268  bool QCamera3HardwareInterface::needRotationReprocess()
14269  {
14270      if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
14271          // current rotation is not zero, and pp has the capability to process rotation
14272          LOGH("need do reprocess for rotation");
14273          return true;
14274      }
14275  
14276      return false;
14277  }
14278  
14279  /*===========================================================================
14280   * FUNCTION   : needReprocess
14281   *
14282   * DESCRIPTION: if reprocess in needed
14283   *
14284   * PARAMETERS : none
14285   *
14286   * RETURN     : true: needed
14287   *              false: no need
14288   *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)14289  bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
14290  {
14291      if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
14292          // TODO: add for ZSL HDR later
14293          // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
14294          if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
14295              LOGH("need do reprocess for ZSL WNR or min PP reprocess");
14296              return true;
14297          } else {
14298              LOGH("already post processed frame");
14299              return false;
14300          }
14301      }
14302      return needRotationReprocess();
14303  }
14304  
14305  /*===========================================================================
14306   * FUNCTION   : needJpegExifRotation
14307   *
14308   * DESCRIPTION: if rotation from jpeg is needed
14309   *
14310   * PARAMETERS : none
14311   *
14312   * RETURN     : true: needed
14313   *              false: no need
14314   *==========================================================================*/
needJpegExifRotation()14315  bool QCamera3HardwareInterface::needJpegExifRotation()
14316  {
14317      /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
14318      if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14319         LOGD("Need use Jpeg EXIF Rotation");
14320         return true;
14321      }
14322      return false;
14323  }
14324  
14325  /*===========================================================================
14326   * FUNCTION   : addOfflineReprocChannel
14327   *
14328   * DESCRIPTION: add a reprocess channel that will do reprocess on frames
14329   *              coming from input channel
14330   *
14331   * PARAMETERS :
14332   *   @config  : reprocess configuration
14333   *   @inputChHandle : pointer to the input (source) channel
14334   *
14335   *
14336   * RETURN     : Ptr to the newly created channel obj. NULL if failed.
14337   *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel tied to the input (source) channel.
    // NOTE(review): the NULL check below is only meaningful if this is built
    // with -fno-exceptions (operator new returning NULL) — confirm build flags.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset, then adjust for the
    // capabilities of this specific sensor/ISP combination.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Strip rotation from the mask when the pp block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate HDR parameters from the caller-provided reprocess config.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Debug override: force multiframe HDR snapshots regardless of config.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    // Ownership of the channel transfers to the caller.
    return pChannel;
}
14397  
14398  /*===========================================================================
14399   * FUNCTION   : getMobicatMask
14400   *
14401   * DESCRIPTION: returns mobicat mask
14402   *
14403   * PARAMETERS : none
14404   *
14405   * RETURN     : mobicat mask
14406   *
14407   *==========================================================================*/
getMobicatMask()14408  uint8_t QCamera3HardwareInterface::getMobicatMask()
14409  {
14410      return m_MobicatMask;
14411  }
14412  
14413  /*===========================================================================
14414   * FUNCTION   : setMobicat
14415   *
14416   * DESCRIPTION: set Mobicat on/off.
14417   *
14418   * PARAMETERS :
14419   *   @params  : none
14420   *
14421   * RETURN     : int32_t type of status
14422   *              NO_ERROR  -- success
14423   *              none-zero failure code
14424   *==========================================================================*/
setMobicat()14425  int32_t QCamera3HardwareInterface::setMobicat()
14426  {
14427      int32_t ret = NO_ERROR;
14428  
14429      if (m_MobicatMask) {
14430          tune_cmd_t tune_cmd;
14431          tune_cmd.type = SET_RELOAD_CHROMATIX;
14432          tune_cmd.module = MODULE_ALL;
14433          tune_cmd.value = TRUE;
14434          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14435                  CAM_INTF_PARM_SET_VFE_COMMAND,
14436                  tune_cmd);
14437  
14438          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14439                  CAM_INTF_PARM_SET_PP_COMMAND,
14440                  tune_cmd);
14441      }
14442  
14443      return ret;
14444  }
14445  
14446  /*===========================================================================
14447  * FUNCTION   : getLogLevel
14448  *
14449  * DESCRIPTION: Reads the log level property into a variable
14450  *
14451  * PARAMETERS :
14452  *   None
14453  *
14454  * RETURN     :
14455  *   None
14456  *==========================================================================*/
getLogLevel()14457  void QCamera3HardwareInterface::getLogLevel()
14458  {
14459      char prop[PROPERTY_VALUE_MAX];
14460      uint32_t globalLogLevel = 0;
14461  
14462      property_get("persist.camera.hal.debug", prop, "0");
14463      int val = atoi(prop);
14464      if (0 <= val) {
14465          gCamHal3LogLevel = (uint32_t)val;
14466      }
14467  
14468      property_get("persist.camera.kpi.debug", prop, "0");
14469      gKpiDebugLevel = atoi(prop);
14470  
14471      property_get("persist.camera.global.debug", prop, "0");
14472      val = atoi(prop);
14473      if (0 <= val) {
14474          globalLogLevel = (uint32_t)val;
14475      }
14476  
14477      /* Highest log level among hal.logs and global.logs is selected */
14478      if (gCamHal3LogLevel < globalLogLevel)
14479          gCamHal3LogLevel = globalLogLevel;
14480  
14481      return;
14482  }
14483  
14484  /*===========================================================================
14485   * FUNCTION   : validateStreamRotations
14486   *
14487   * DESCRIPTION: Check if the rotations requested are supported
14488   *
14489   * PARAMETERS :
14490   *   @stream_list : streams to be configured
14491   *
14492   * RETURN     : NO_ERROR on success
14493   *              -EINVAL on failure
14494   *
14495   *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)14496  int QCamera3HardwareInterface::validateStreamRotations(
14497          camera3_stream_configuration_t *streamList)
14498  {
14499      int rc = NO_ERROR;
14500  
14501      /*
14502      * Loop through all streams requested in configuration
14503      * Check if unsupported rotations have been requested on any of them
14504      */
14505      for (size_t j = 0; j < streamList->num_streams; j++){
14506          camera3_stream_t *newStream = streamList->streams[j];
14507  
14508          switch(newStream->rotation) {
14509              case CAMERA3_STREAM_ROTATION_0:
14510              case CAMERA3_STREAM_ROTATION_90:
14511              case CAMERA3_STREAM_ROTATION_180:
14512              case CAMERA3_STREAM_ROTATION_270:
14513                  //Expected values
14514                  break;
14515              default:
14516                  ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14517                          "type:%d and stream format:%d", __func__,
14518                          newStream->rotation, newStream->stream_type,
14519                          newStream->format);
14520                  return -EINVAL;
14521          }
14522  
14523          bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14524          bool isImplDef = (newStream->format ==
14525                  HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14526          bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14527                  isImplDef);
14528  
14529          if (isRotated && (!isImplDef || isZsl)) {
14530              LOGE("Error: Unsupported rotation of %d requested for stream"
14531                      "type:%d and stream format:%d",
14532                      newStream->rotation, newStream->stream_type,
14533                      newStream->format);
14534              rc = -EINVAL;
14535              break;
14536          }
14537      }
14538  
14539      return rc;
14540  }
14541  
14542  /*===========================================================================
14543  * FUNCTION   : getFlashInfo
14544  *
14545  * DESCRIPTION: Retrieve information about whether the device has a flash.
14546  *
14547  * PARAMETERS :
14548  *   @cameraId  : Camera id to query
14549  *   @hasFlash  : Boolean indicating whether there is a flash device
14550  *                associated with given camera
14551  *   @flashNode : If a flash device exists, this will be its device node.
14552  *
14553  * RETURN     :
14554  *   None
14555  *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])14556  void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14557          bool& hasFlash,
14558          char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14559  {
14560      cam_capability_t* camCapability = gCamCapability[cameraId];
14561      if (NULL == camCapability) {
14562          hasFlash = false;
14563          flashNode[0] = '\0';
14564      } else {
14565          hasFlash = camCapability->flash_available;
14566          strlcpy(flashNode,
14567                  (char*)camCapability->flash_dev_name,
14568                  QCAMERA_MAX_FILEPATH_LENGTH);
14569      }
14570  }
14571  
14572  /*===========================================================================
14573  * FUNCTION   : getEepromVersionInfo
14574  *
14575  * DESCRIPTION: Retrieve version info of the sensor EEPROM data
14576  *
14577  * PARAMETERS : None
14578  *
14579  * RETURN     : string describing EEPROM version
14580  *              "\0" if no such info available
14581  *==========================================================================*/
getEepromVersionInfo()14582  const char *QCamera3HardwareInterface::getEepromVersionInfo()
14583  {
14584      return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14585  }
14586  
14587  /*===========================================================================
14588  * FUNCTION   : getLdafCalib
14589  *
14590  * DESCRIPTION: Retrieve Laser AF calibration data
14591  *
14592  * PARAMETERS : None
14593  *
14594  * RETURN     : Two uint32_t describing laser AF calibration data
14595  *              NULL if none is available.
14596  *==========================================================================*/
getLdafCalib()14597  const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14598  {
14599      if (mLdafCalibExist) {
14600          return &mLdafCalib[0];
14601      } else {
14602          return NULL;
14603      }
14604  }
14605  
14606  /*===========================================================================
14607  * FUNCTION   : getEaselFwVersion
14608  *
14609  * DESCRIPTION: Retrieve Easel firmware version
14610  *
14611  * PARAMETERS : None
14612  *
14613  * RETURN     : string describing Firmware version
14614  *              "\0" if version is not up to date
14615  *==========================================================================*/
getEaselFwVersion()14616  const char *QCamera3HardwareInterface::getEaselFwVersion()
14617  {
14618      if (mEaselFwUpdated) {
14619          return (const char *)&mEaselFwVersion[0];
14620      } else {
14621          return NULL;
14622      }
14623  }
14624  
14625  /*===========================================================================
14626   * FUNCTION   : dynamicUpdateMetaStreamInfo
14627   *
14628   * DESCRIPTION: This function:
14629   *             (1) stops all the channels
14630   *             (2) returns error on pending requests and buffers
14631   *             (3) sends metastream_info in setparams
14632   *             (4) starts all channels
14633   *             This is useful when sensor has to be restarted to apply any
14634   *             settings such as frame rate from a different sensor mode
14635   *
14636   * PARAMETERS : None
14637   *
14638   * RETURN     : NO_ERROR on success
14639   *              Error codes on failure
14640   *
14641   *==========================================================================*/
dynamicUpdateMetaStreamInfo()14642  int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14643  {
14644      ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
14645      int rc = NO_ERROR;
14646  
14647      LOGD("E");
14648  
14649      rc = stopAllChannels();
14650      if (rc < 0) {
14651          LOGE("stopAllChannels failed");
14652          return rc;
14653      }
14654  
14655      rc = notifyErrorForPendingRequests();
14656      if (rc < 0) {
14657          LOGE("notifyErrorForPendingRequests failed");
14658          return rc;
14659      }
14660  
14661      for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14662          LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14663                  "Format:%d",
14664                  mStreamConfigInfo.type[i],
14665                  mStreamConfigInfo.stream_sizes[i].width,
14666                  mStreamConfigInfo.stream_sizes[i].height,
14667                  mStreamConfigInfo.postprocess_mask[i],
14668                  mStreamConfigInfo.format[i]);
14669      }
14670  
14671      /* Send meta stream info once again so that ISP can start */
14672      ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14673              CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14674      rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14675              mParameters);
14676      if (rc < 0) {
14677          LOGE("set Metastreaminfo failed. Sensor mode does not change");
14678      }
14679  
14680      rc = startAllChannels();
14681      if (rc < 0) {
14682          LOGE("startAllChannels failed");
14683          return rc;
14684      }
14685  
14686      LOGD("X");
14687      return rc;
14688  }
14689  
14690  /*===========================================================================
14691   * FUNCTION   : stopAllChannels
14692   *
14693   * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14694   *
14695   * PARAMETERS : None
14696   *
14697   * RETURN     : NO_ERROR on success
14698   *              Error codes on failure
14699   *
14700   *==========================================================================*/
stopAllChannels()14701  int32_t QCamera3HardwareInterface::stopAllChannels()
14702  {
14703      int32_t rc = NO_ERROR;
14704  
14705      LOGD("Stopping all channels");
14706      // Stop the Streams/Channels
14707      for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14708          it != mStreamInfo.end(); it++) {
14709          QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14710          if (channel) {
14711              channel->stop();
14712          }
14713          (*it)->status = INVALID;
14714      }
14715  
14716      if (mSupportChannel) {
14717          mSupportChannel->stop();
14718      }
14719      if (mAnalysisChannel) {
14720          mAnalysisChannel->stop();
14721      }
14722      if (mRawDumpChannel) {
14723          mRawDumpChannel->stop();
14724      }
14725      if (mHdrPlusRawSrcChannel) {
14726          mHdrPlusRawSrcChannel->stop();
14727      }
14728      if (mMetadataChannel) {
14729          /* If content of mStreamInfo is not 0, there is metadata stream */
14730          mMetadataChannel->stop();
14731      }
14732  
14733      LOGD("All channels stopped");
14734      return rc;
14735  }
14736  
14737  /*===========================================================================
14738   * FUNCTION   : startAllChannels
14739   *
14740   * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14741   *
14742   * PARAMETERS : None
14743   *
14744   * RETURN     : NO_ERROR on success
14745   *              Error codes on failure
14746   *
14747   *==========================================================================*/
startAllChannels()14748  int32_t QCamera3HardwareInterface::startAllChannels()
14749  {
14750      int32_t rc = NO_ERROR;
14751  
14752      LOGD("Start all channels ");
14753      // Start the Streams/Channels
14754      if (mMetadataChannel) {
14755          /* If content of mStreamInfo is not 0, there is metadata stream */
14756          rc = mMetadataChannel->start();
14757          if (rc < 0) {
14758              LOGE("META channel start failed");
14759              return rc;
14760          }
14761      }
14762      for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14763          it != mStreamInfo.end(); it++) {
14764          QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14765          if (channel) {
14766              rc = channel->start();
14767              if (rc < 0) {
14768                  LOGE("channel start failed");
14769                  return rc;
14770              }
14771          }
14772      }
14773      if (mAnalysisChannel) {
14774          mAnalysisChannel->start();
14775      }
14776      if (mSupportChannel) {
14777          rc = mSupportChannel->start();
14778          if (rc < 0) {
14779              LOGE("Support channel start failed");
14780              return rc;
14781          }
14782      }
14783      if (mRawDumpChannel) {
14784          rc = mRawDumpChannel->start();
14785          if (rc < 0) {
14786              LOGE("RAW dump channel start failed");
14787              return rc;
14788          }
14789      }
14790      if (mHdrPlusRawSrcChannel) {
14791          rc = mHdrPlusRawSrcChannel->start();
14792          if (rc < 0) {
14793              LOGE("HDR+ RAW channel start failed");
14794              return rc;
14795          }
14796      }
14797  
14798      LOGD("All channels started");
14799      return rc;
14800  }
14801  
14802  /*===========================================================================
14803   * FUNCTION   : notifyErrorForPendingRequests
14804   *
14805   * DESCRIPTION: This function sends error for all the pending requests/buffers
14806   *
14807   * PARAMETERS : None
14808   *
14809   * RETURN     : Error codes
14810   *              NO_ERROR on success
14811   *
14812   *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    notifyErrorFoPendingDepthData(mDepthChannel);

    // Both lists are ordered by frame number; walk them in a merge-style
    // sweep so each frame number is classified exactly once.
    auto pendingRequest = mPendingRequestsList.begin();
    auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();

    // Iterate through pending requests (for which result metadata isn't sent yet) and pending
    // buffers (for which buffers aren't sent yet).
    while (pendingRequest != mPendingRequestsList.end() ||
           pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingRequest == mPendingRequestsList.end() ||
            pendingBuffer->frame_number < pendingRequest->frame_number) {
            // If metadata for this frame was sent, notify about a buffer error and returns buffers
            // with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                // Send a buffer error for this frame number.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                // Return the buffer itself with ERROR status so the framework
                // can reclaim it.
                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
        } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
                   pendingBuffer->frame_number > pendingRequest->frame_number) {
            // If the buffers for this frame were sent already, notify about a result error.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
            notify_msg.message.error.error_stream = nullptr;
            notify_msg.message.error.frame_number = pendingRequest->frame_number;
            orchestrateNotify(&notify_msg);

            // A reprocess request's input buffer must still be returned.
            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        } else {
            // If both buffers and result metadata weren't sent yet, notify about a request error
            // and return buffers with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            // Same frame number on both lists: advance both iterators.
            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();
    mShutterDispatcher.clear();
    mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mExpectedFrameDuration = 0;
    mExpectedInflightDuration = 0;
    LOGH("Cleared all the pending buffers ");

    return NO_ERROR;
}
14915  
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)14916  bool QCamera3HardwareInterface::isOnEncoder(
14917          const cam_dimension_t max_viewfinder_size,
14918          uint32_t width, uint32_t height)
14919  {
14920      return ((width > (uint32_t)max_viewfinder_size.width) ||
14921              (height > (uint32_t)max_viewfinder_size.height) ||
14922              (width > (uint32_t)VIDEO_4K_WIDTH) ||
14923              (height > (uint32_t)VIDEO_4K_HEIGHT));
14924  }
14925  
14926  /*===========================================================================
14927   * FUNCTION   : setBundleInfo
14928   *
14929   * DESCRIPTION: Set bundle info for all streams that are bundle.
14930   *
14931   * PARAMETERS : None
14932   *
14933   * RETURN     : NO_ERROR on success
14934   *              Error codes on failure
14935   *==========================================================================*/
setBundleInfo()14936  int32_t QCamera3HardwareInterface::setBundleInfo()
14937  {
14938      int32_t rc = NO_ERROR;
14939  
14940      if (mChannelHandle) {
14941          cam_bundle_config_t bundleInfo;
14942          memset(&bundleInfo, 0, sizeof(bundleInfo));
14943          rc = mCameraHandle->ops->get_bundle_info(
14944                  mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14945          if (rc != NO_ERROR) {
14946              LOGE("get_bundle_info failed");
14947              return rc;
14948          }
14949          if (mAnalysisChannel) {
14950              mAnalysisChannel->setBundleInfo(bundleInfo);
14951          }
14952          if (mSupportChannel) {
14953              mSupportChannel->setBundleInfo(bundleInfo);
14954          }
14955          for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14956                  it != mStreamInfo.end(); it++) {
14957              QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14958              channel->setBundleInfo(bundleInfo);
14959          }
14960          if (mRawDumpChannel) {
14961              mRawDumpChannel->setBundleInfo(bundleInfo);
14962          }
14963          if (mHdrPlusRawSrcChannel) {
14964              mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14965          }
14966      }
14967  
14968      return rc;
14969  }
14970  
14971  /*===========================================================================
14972   * FUNCTION   : setInstantAEC
14973   *
14974   * DESCRIPTION: Set Instant AEC related params.
14975   *
14976   * PARAMETERS :
14977   *      @meta: CameraMetadata reference
14978   *
14979   * RETURN     : NO_ERROR on success
14980   *              Error codes on failure
14981   *==========================================================================*/
setInstantAEC(const CameraMetadata & meta)14982  int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14983  {
14984      int32_t rc = NO_ERROR;
14985      uint8_t val = 0;
14986      char prop[PROPERTY_VALUE_MAX];
14987  
14988      // First try to configure instant AEC from framework metadata
14989      if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14990          val = meta.find(QCAMERA3_INSTANT_AEC_MODE).data.u8[0];
14991          LOGE("Instant AEC mode set: %d", val);
14992      }
14993  
14994      // If framework did not set this value, try to read from set prop.
14995      if (val == 0) {
14996          memset(prop, 0, sizeof(prop));
14997          property_get("persist.camera.instant.aec", prop, "0");
14998          val = (uint8_t)atoi(prop);
14999      }
15000  
15001      if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
15002             ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
15003          ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
15004          mInstantAEC = val;
15005          mInstantAECSettledFrameNumber = 0;
15006          mInstantAecFrameIdxCount = 0;
15007          LOGH("instantAEC value set %d",val);
15008          if (mInstantAEC) {
15009              memset(prop, 0, sizeof(prop));
15010              property_get("persist.camera.ae.instant.bound", prop, "10");
15011              int32_t aec_frame_skip_cnt = atoi(prop);
15012              if (aec_frame_skip_cnt >= 0) {
15013                  mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
15014              } else {
15015                  LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
15016                  rc = BAD_VALUE;
15017              }
15018          }
15019      } else {
15020          LOGE("Bad instant aec value set %d", val);
15021          rc = BAD_VALUE;
15022      }
15023      return rc;
15024  }
15025  
15026  /*===========================================================================
15027   * FUNCTION   : get_num_overall_buffers
15028   *
15029   * DESCRIPTION: Estimate number of pending buffers across all requests.
15030   *
15031   * PARAMETERS : None
15032   *
15033   * RETURN     : Number of overall pending buffers
15034   *
15035   *==========================================================================*/
get_num_overall_buffers()15036  uint32_t PendingBuffersMap::get_num_overall_buffers()
15037  {
15038      uint32_t sum_buffers = 0;
15039      for (auto &req : mPendingBuffersInRequest) {
15040          sum_buffers += req.mPendingBufferList.size();
15041      }
15042      return sum_buffers;
15043  }
15044  
15045  /*===========================================================================
15046   * FUNCTION   : removeBuf
15047   *
15048   * DESCRIPTION: Remove a matching buffer from tracker.
15049   *
15050   * PARAMETERS : @buffer: image buffer for the callback
15051   *
15052   * RETURN     : None
15053   *
15054   *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    // Locate the first pending request that holds this buffer handle, drop
    // the buffer from its list, and remove the request once its list is empty.
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() returns the next valid iterator; both loops break
                // immediately afterwards, so no invalidated iterator is used.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
15081  
15082  /*===========================================================================
15083   * FUNCTION   : getBufErrStatus
15084   *
15085   * DESCRIPTION: get buffer error status
15086   *
15087   * PARAMETERS : @buffer: buffer handle
15088   *
15089   * RETURN     : Error status
15090   *
15091   *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)15092  int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
15093  {
15094      for (auto& req : mPendingBuffersInRequest) {
15095          for (auto& k : req.mPendingBufferList) {
15096              if (k.buffer == buffer)
15097                  return k.bufStatus;
15098          }
15099      }
15100      return CAMERA3_BUFFER_STATUS_OK;
15101  }
15102  
15103  /*===========================================================================
15104   * FUNCTION   : setPAAFSupport
15105   *
15106   * DESCRIPTION: Set the preview-assisted auto focus support bit in
15107   *              feature mask according to stream type and filter
15108   *              arrangement
15109   *
15110   * PARAMETERS : @feature_mask: current feature mask, which may be modified
15111   *              @stream_type: stream type
15112   *              @filter_arrangement: filter arrangement
15113   *
15114   * RETURN     : None
15115   *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)15116  void QCamera3HardwareInterface::setPAAFSupport(
15117          cam_feature_mask_t& feature_mask,
15118          cam_stream_type_t stream_type,
15119          cam_color_filter_arrangement_t filter_arrangement)
15120  {
15121      switch (filter_arrangement) {
15122      case CAM_FILTER_ARRANGEMENT_RGGB:
15123      case CAM_FILTER_ARRANGEMENT_GRBG:
15124      case CAM_FILTER_ARRANGEMENT_GBRG:
15125      case CAM_FILTER_ARRANGEMENT_BGGR:
15126          if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
15127                  (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
15128                  (stream_type == CAM_STREAM_TYPE_VIDEO)) {
15129              if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
15130                  feature_mask |= CAM_QCOM_FEATURE_PAAF;
15131          }
15132          break;
15133      case CAM_FILTER_ARRANGEMENT_Y:
15134          if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
15135              feature_mask |= CAM_QCOM_FEATURE_PAAF;
15136          }
15137          break;
15138      default:
15139          break;
15140      }
15141      LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
15142              feature_mask, stream_type, filter_arrangement);
15143  
15144  
15145  }
15146  
15147  /*===========================================================================
15148  * FUNCTION   : getSensorMountAngle
15149  *
15150  * DESCRIPTION: Retrieve sensor mount angle
15151  *
15152  * PARAMETERS : None
15153  *
15154  * RETURN     : sensor mount angle in uint32_t
15155  *==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    // Read straight from the cached capability table for this camera id.
    return gCamCapability[mCameraId]->sensor_mount_angle;
}
15160  
15161  /*===========================================================================
15162  * FUNCTION   : getRelatedCalibrationData
15163  *
15164  * DESCRIPTION: Retrieve related system calibration data
15165  *
15166  * PARAMETERS : None
15167  *
15168  * RETURN     : Pointer of related system calibration data
15169  *==========================================================================*/
getRelatedCalibrationData()15170  const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
15171  {
15172      return (const cam_related_system_calibration_data_t *)
15173              &(gCamCapability[mCameraId]->related_cam_calibration);
15174  }
15175  
15176  /*===========================================================================
15177   * FUNCTION   : is60HzZone
15178   *
15179   * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
15180   *
15181   * PARAMETERS : None
15182   *
15183   * RETURN     : True if in 60Hz zone, False otherwise
15184   *==========================================================================*/
is60HzZone()15185  bool QCamera3HardwareInterface::is60HzZone()
15186  {
15187      time_t t = time(NULL);
15188      struct tm lt;
15189  
15190      struct tm* r = localtime_r(&t, &lt);
15191  
15192      if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
15193          return true;
15194      else
15195          return false;
15196  }
15197  
15198  /*===========================================================================
15199   * FUNCTION   : adjustBlackLevelForCFA
15200   *
15201   * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
15202   *              of bayer CFA (Color Filter Array).
15203   *
15204   * PARAMETERS : @input: black level pattern in the order of RGGB
15205   *              @output: black level pattern in the order of CFA
15206   *              @color_arrangement: CFA color arrangement
15207   *
15208   * RETURN     : None
15209   *==========================================================================*/
15210  template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)15211  void QCamera3HardwareInterface::adjustBlackLevelForCFA(
15212          T input[BLACK_LEVEL_PATTERN_CNT],
15213          T output[BLACK_LEVEL_PATTERN_CNT],
15214          cam_color_filter_arrangement_t color_arrangement)
15215  {
15216      switch (color_arrangement) {
15217      case CAM_FILTER_ARRANGEMENT_GRBG:
15218          output[0] = input[1];
15219          output[1] = input[0];
15220          output[2] = input[3];
15221          output[3] = input[2];
15222          break;
15223      case CAM_FILTER_ARRANGEMENT_GBRG:
15224          output[0] = input[2];
15225          output[1] = input[3];
15226          output[2] = input[0];
15227          output[3] = input[1];
15228          break;
15229      case CAM_FILTER_ARRANGEMENT_BGGR:
15230          output[0] = input[3];
15231          output[1] = input[2];
15232          output[2] = input[1];
15233          output[3] = input[0];
15234          break;
15235      case CAM_FILTER_ARRANGEMENT_RGGB:
15236          output[0] = input[0];
15237          output[1] = input[1];
15238          output[2] = input[2];
15239          output[3] = input[3];
15240          break;
15241      default:
15242          LOGE("Invalid color arrangement to derive dynamic blacklevel");
15243          break;
15244      }
15245  }
15246  
/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy JPEG-related tags and the capture intent from the HAL
 *              settings buffer into the framework result metadata. Each tag
 *              present in settings overwrites the result entry; a tag absent
 *              from settings is erased from the result.
 *
 * PARAMETERS : @resultMetadata: framework metadata updated in place
 *              @settings: HAL metadata buffer holding request settings
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
    CameraMetadata &resultMetadata,
    std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    // GPS coordinates: lat/long/altitude triple.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
    }

    // GPS processing method is a NUL-terminated byte string in the settings.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
    }

    // HAL stores quality as uint32_t; the framework tag is uint8_t.
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_QUALITY);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
    }

    // Thumbnail size converts from cam_dimension_t to an int32 [width, height] pair.
    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    } else {
        resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
    }
}
15311  
isRequestHdrPlusCompatible(const camera3_capture_request_t & request,const CameraMetadata & metadata)15312  bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
15313          const camera3_capture_request_t &request, const CameraMetadata &metadata) {
15314      if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
15315              metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
15316          ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
15317          return false;
15318      }
15319  
15320      if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
15321           metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
15322              ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
15323          ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
15324                  metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
15325          return false;
15326      }
15327  
15328      if (!metadata.exists(ANDROID_EDGE_MODE) ||
15329              metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
15330          ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
15331          return false;
15332      }
15333  
15334      if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
15335              metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
15336                      ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
15337          ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
15338          return false;
15339      }
15340  
15341      if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
15342              (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
15343               metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
15344                      ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
15345          ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
15346          return false;
15347      }
15348  
15349      if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
15350              metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
15351          ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
15352          return false;
15353      }
15354  
15355      if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
15356              metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
15357                      ANDROID_CONTROL_EFFECT_MODE_OFF) {
15358          ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
15359          return false;
15360      }
15361  
15362      if (!metadata.exists(ANDROID_CONTROL_MODE) ||
15363              (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
15364               metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
15365                      ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
15366          ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
15367          return false;
15368      }
15369  
15370      // TODO (b/32585046): support non-ZSL.
15371      if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
15372           metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
15373          ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
15374          return false;
15375      }
15376  
15377      // TODO (b/32586081): support flash.
15378      if (!metadata.exists(ANDROID_FLASH_MODE) ||
15379           metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
15380          ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
15381          return false;
15382      }
15383  
15384      if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
15385           metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
15386          ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
15387          return false;
15388      }
15389  
15390      switch (request.output_buffers[0].stream->format) {
15391          case HAL_PIXEL_FORMAT_BLOB:
15392          case HAL_PIXEL_FORMAT_YCbCr_420_888:
15393          case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15394              break;
15395          default:
15396              ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
15397              for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15398                  ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15399                          request.output_buffers[0].stream->width,
15400                          request.output_buffers[0].stream->height,
15401                          request.output_buffers[0].stream->format);
15402              }
15403              return false;
15404      }
15405  
15406      return true;
15407  }
15408  
abortPendingHdrplusRequest(HdrPlusPendingRequest * hdrPlusRequest)15409  void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15410      if (hdrPlusRequest == nullptr) return;
15411  
15412      for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15413          // Find the stream for this buffer.
15414          for (auto streamInfo : mStreamInfo) {
15415              if (streamInfo->id == outputBufferIter.first) {
15416                  if (streamInfo->channel == mPictureChannel) {
15417                      // For picture channel, this buffer is internally allocated so return this
15418                      // buffer to picture channel.
15419                      mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15420                  } else {
15421                      // Unregister this buffer for other channels.
15422                      streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15423                  }
15424                  break;
15425              }
15426          }
15427      }
15428  
15429      hdrPlusRequest->outputBuffers.clear();
15430      hdrPlusRequest->frameworkOutputBuffers.clear();
15431  }
15432  
/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Attempt to submit a framework capture request to the HDR+
 *              service. Builds a pbcamera::CaptureRequest from the request's
 *              output buffers, recording each buffer in hdrPlusRequest so it
 *              can be matched (or returned on failure) later. On any failure
 *              the buffers gathered so far are released via
 *              abortPendingHdrplusRequest().
 *
 * PARAMETERS : @hdrPlusRequest: tracker filled with the buffers of this request
 *              @request: the framework capture request
 *              @metadata: request settings forwarded to the HDR+ service
 *
 * RETURN     : true if the request was accepted by the HDR+ service
 *==========================================================================*/
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;
    if (!isRequestHdrPlusCompatible(request, metadata)) return false;

    status_t res = OK;
    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    // Iterate through all requested output buffers and add them to an HDR+ request.
    for (uint32_t i = 0; i < request.num_output_buffers; i++) {
        // Find the index of the stream in mStreamInfo.
        uint32_t pbStreamId = 0;
        bool found = false;
        for (auto streamInfo : mStreamInfo) {
            if (streamInfo->stream == request.output_buffers[i].stream) {
                pbStreamId = streamInfo->id;
                found = true;
                break;
            }
        }

        if (!found) {
            ALOGE("%s: requested stream was not configured.", __FUNCTION__);
            abortPendingHdrplusRequest(hdrPlusRequest);
            return false;
        }
        auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
        switch (request.output_buffers[i].stream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
            {
                // For jpeg output, get a YUV buffer from pic channel.
                QCamera3PicChannel *picChannel =
                        (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
                res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
                if (res != OK) {
                    ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            {
                // For YUV output, register the buffer and get the buffer def from the channel.
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
                res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
                        outBuffer.get());
                if (res != OK) {
                    ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
                            strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            default:
                // Unsupported format; release everything gathered so far.
                abortPendingHdrplusRequest(hdrPlusRequest);
                return false;
        }

        // Describe the buffer for the HDR+ service. When there is no dma-buf
        // fd (-1), the service is given the CPU pointer instead.
        pbcamera::StreamBuffer buffer;
        buffer.streamId = pbStreamId;
        buffer.dmaBufFd = outBuffer->fd;
        buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
        buffer.dataSize = outBuffer->frame_len;

        pbRequest.outputBuffers.push_back(buffer);

        // Track both the HAL buffer and the framework buffer by stream id so
        // results can be routed back later.
        hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
        hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
    }

    // Submit an HDR+ capture request to HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        abortPendingHdrplusRequest(hdrPlusRequest);
        return false;
    }

    return true;
}
15521  
openHdrPlusClientAsyncLocked()15522  status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15523  {
15524      if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15525          return OK;
15526      }
15527  
15528      status_t res = gEaselManagerClient->openHdrPlusClientAsync(mQCamera3HdrPlusListenerThread.get());
15529      if (res != OK) {
15530          ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15531                  strerror(-res), res);
15532          return res;
15533      }
15534      gHdrPlusClientOpening = true;
15535  
15536      return OK;
15537  }
15538  
enableHdrPlusModeLocked()15539  status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15540  {
15541      status_t res;
15542  
15543      if (mHdrPlusModeEnabled) {
15544          return OK;
15545      }
15546  
15547      // Check if gHdrPlusClient is opened or being opened.
15548      if (gHdrPlusClient == nullptr) {
15549          if (gHdrPlusClientOpening) {
15550              // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15551              return OK;
15552          }
15553  
15554          res = openHdrPlusClientAsyncLocked();
15555          if (res != OK) {
15556              ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15557                      strerror(-res), res);
15558              return res;
15559          }
15560  
15561          // When opening HDR+ client completes, HDR+ mode will be enabled.
15562          return OK;
15563  
15564      }
15565  
15566      // Configure stream for HDR+.
15567      res = configureHdrPlusStreamsLocked();
15568      if (res != OK) {
15569          LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
15570          return res;
15571      }
15572  
15573      // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15574      res = gHdrPlusClient->setZslHdrPlusMode(true);
15575      if (res != OK) {
15576          LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15577          return res;
15578      }
15579  
15580      mHdrPlusModeEnabled = true;
15581      ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15582  
15583      return OK;
15584  }
15585  
finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> & lock)15586  void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15587  {
15588      if (gHdrPlusClientOpening) {
15589          gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15590      }
15591  }
15592  
disableHdrPlusModeLocked()15593  void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15594  {
15595      // Disable HDR+ mode.
15596      if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
15597          status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15598          if (res != OK) {
15599              ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15600          }
15601  
15602          // Close HDR+ client so Easel can enter low power mode.
15603          gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
15604          gHdrPlusClient = nullptr;
15605      }
15606  
15607      mHdrPlusModeEnabled = false;
15608      ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15609  }
15610  
isSessionHdrPlusModeCompatible()15611  bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15612  {
15613      // Check that at least one YUV or one JPEG output is configured.
15614      // TODO: Support RAW (b/36690506)
15615      for (auto streamInfo : mStreamInfo) {
15616          if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15617              if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15618                      (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15619                       streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15620                       streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15621                  return true;
15622              }
15623          }
15624      }
15625  
15626      return false;
15627  }
15628  
configureHdrPlusStreamsLocked()15629  status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
15630  {
15631      pbcamera::InputConfiguration inputConfig;
15632      std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15633      status_t res = OK;
15634  
15635      // Sensor MIPI will send data to Easel.
15636      inputConfig.isSensorInput = true;
15637      inputConfig.sensorMode.cameraId = mCameraId;
15638      inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15639      inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15640      inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15641      inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15642      inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15643      inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15644      inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
15645  
15646      if (mSensorModeInfo.num_raw_bits != 10) {
15647          ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15648                  mSensorModeInfo.num_raw_bits);
15649          return BAD_VALUE;
15650      }
15651  
15652      inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
15653  
15654      // Iterate through configured output streams in HAL and configure those streams in HDR+
15655      // service.
15656      for (auto streamInfo : mStreamInfo) {
15657          pbcamera::StreamConfiguration outputConfig;
15658          if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15659              switch (streamInfo->stream->format) {
15660                  case HAL_PIXEL_FORMAT_BLOB:
15661                  case HAL_PIXEL_FORMAT_YCbCr_420_888:
15662                  case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15663                      res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15664                              streamInfo->channel, /*stream index*/0);
15665                      if (res != OK) {
15666                          LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15667                              __FUNCTION__, strerror(-res), res);
15668  
15669                          return res;
15670                      }
15671  
15672                      outputStreamConfigs.push_back(outputConfig);
15673                      break;
15674                  default:
15675                      // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15676                      break;
15677              }
15678          }
15679      }
15680  
15681      res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
15682      if (res != OK) {
15683          LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15684              strerror(-res), res);
15685          return res;
15686      }
15687  
15688      return OK;
15689  }
15690  
/*===========================================================================
 * FUNCTION   : handleEaselFatalError
 *
 * DESCRIPTION: React to a fatal Easel error: notify the HDR+ client (if any),
 *              move the HAL into the ERROR state, and report the device error
 *              to the framework.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::handleEaselFatalError()
{
    {
        // Scoped so gHdrPlusClientLock is dropped before taking mMutex below.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            gHdrPlusClient->nofityEaselFatalError();
        }
    }

    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    // Stop channels immediately since Easel is no longer usable.
    handleCameraDeviceError(/*stopChannelImmediately*/true);
}
15706  
cleanupEaselErrorFuture()15707  void QCamera3HardwareInterface::cleanupEaselErrorFuture()
15708  {
15709      {
15710          std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
15711          if (!mEaselErrorFuture.valid()) {
15712              // If there is no Easel error, construct a dummy future to wait for.
15713              mEaselErrorFuture = std::async([]() { return; });
15714          }
15715      }
15716  
15717      mEaselErrorFuture.wait();
15718  }
15719  
handleEaselFatalErrorAsync()15720  void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15721  {
15722      std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
15723  
15724      if (mEaselErrorFuture.valid()) {
15725          // The error future has been invoked.
15726          return;
15727      }
15728  
15729      // Launch a future to handle the fatal error.
15730      mEaselErrorFuture = std::async(std::launch::async,
15731              &QCamera3HardwareInterface::handleEaselFatalError, this);
15732  }
15733  
// Easel fatal-error callback: log the message and hand off to the async
// error handler so the callback thread is not blocked.
void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    handleEaselFatalErrorAsync();
}
15739  
/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: Callback invoked when the HDR+ client finishes opening.
 *              Installs the client globally, wakes waiters, pushes the static
 *              metadata, enables HDR+ mode, and records the Easel firmware
 *              version.
 *
 * PARAMETERS : @client: the newly opened HDR+ client (ownership transferred)
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    int rc = NO_ERROR;

    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        // HDR+ was disabled while the open was in flight; drop the client.
        ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
        return;
    }

    // Publish the client and wake any thread waiting for the open to finish.
    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;
    gHdrPlusClientOpenCond.notify_one();

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
            __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
    }

    // Get Easel firmware version
    if (EaselManagerClientOpened) {
        rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
        if (rc != OK) {
            ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
        } else {
            mEaselFwUpdated = true;
        }
    }
}
15788  
onOpenFailed(status_t err)15789  void QCamera3HardwareInterface::onOpenFailed(status_t err)
15790  {
15791      ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
15792      std::unique_lock<std::mutex> l(gHdrPlusClientLock);
15793      gHdrPlusClientOpening = false;
15794      gHdrPlusClientOpenCond.notify_one();
15795  }
15796  
/**
 * HDR+ client callback: invoked when the client hits an unrecoverable error.
 * Logs the error and kicks off asynchronous Easel fatal-error handling.
 */
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
    handleEaselFatalErrorAsync();
}
15802  
/**
 * HDR+ client callback: shutter fired for an HDR+ request.
 *
 * Forwards the shutter to the dispatcher, which delivers shutters to the
 * framework in frame-number order.
 *
 * @param requestId           frame number of the HDR+ request.
 * @param apSensorTimestampNs AP-side sensor timestamp in nanoseconds.
 */
void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
{
    ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
            __LINE__, requestId, apSensorTimestampNs);

    mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
}
15810  
onNextCaptureReady(uint32_t requestId)15811  void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15812  {
15813      pthread_mutex_lock(&mMutex);
15814  
15815      // Find the pending request for this result metadata.
15816      auto requestIter = mPendingRequestsList.begin();
15817      while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15818          requestIter++;
15819      }
15820  
15821      if (requestIter == mPendingRequestsList.end()) {
15822          ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15823          pthread_mutex_unlock(&mMutex);
15824          return;
15825      }
15826  
15827      requestIter->partial_result_cnt++;
15828  
15829      CameraMetadata metadata;
15830      uint8_t ready = true;
15831      metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15832  
15833      // Send it to framework.
15834      camera3_capture_result_t result = {};
15835  
15836      result.result = metadata.getAndLock();
15837      // Populate metadata result
15838      result.frame_number = requestId;
15839      result.num_output_buffers = 0;
15840      result.output_buffers = NULL;
15841      result.partial_result = requestIter->partial_result_cnt;
15842  
15843      orchestrateResult(&result);
15844      metadata.unlock(result.result);
15845  
15846      pthread_mutex_unlock(&mMutex);
15847  }
15848  
onPostview(uint32_t requestId,std::unique_ptr<std::vector<uint8_t>> postview,uint32_t width,uint32_t height,uint32_t stride,int32_t format)15849  void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15850          std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15851          uint32_t stride, int32_t format)
15852  {
15853      if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15854          ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15855                  __LINE__, width, height, requestId);
15856          char buf[FILENAME_MAX] = {};
15857          snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15858                  requestId, width, height);
15859  
15860          pbcamera::StreamConfiguration config = {};
15861          config.image.width = width;
15862          config.image.height = height;
15863          config.image.format = format;
15864  
15865          pbcamera::PlaneConfiguration plane = {};
15866          plane.stride = stride;
15867          plane.scanline = height;
15868  
15869          config.image.planes.push_back(plane);
15870  
15871          pbcamera::StreamBuffer buffer = {};
15872          buffer.streamId = 0;
15873          buffer.dmaBufFd = -1;
15874          buffer.data = postview->data();
15875          buffer.dataSize = postview->size();
15876  
15877          hdrplus_client_utils::writePpm(buf, config, buffer);
15878      }
15879  
15880      pthread_mutex_lock(&mMutex);
15881  
15882      // Find the pending request for this result metadata.
15883      auto requestIter = mPendingRequestsList.begin();
15884      while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15885          requestIter++;
15886      }
15887  
15888      if (requestIter == mPendingRequestsList.end()) {
15889          ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15890          pthread_mutex_unlock(&mMutex);
15891          return;
15892      }
15893  
15894      requestIter->partial_result_cnt++;
15895  
15896      CameraMetadata metadata;
15897      int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15898              static_cast<int32_t>(stride)};
15899      metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15900      metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15901  
15902      // Send it to framework.
15903      camera3_capture_result_t result = {};
15904  
15905      result.result = metadata.getAndLock();
15906      // Populate metadata result
15907      result.frame_number = requestId;
15908      result.num_output_buffers = 0;
15909      result.output_buffers = NULL;
15910      result.partial_result = requestIter->partial_result_cnt;
15911  
15912      orchestrateResult(&result);
15913      metadata.unlock(result.result);
15914  
15915      pthread_mutex_unlock(&mMutex);
15916  }
15917  
onCaptureResult(pbcamera::CaptureResult * result,const camera_metadata_t & resultMetadata)15918  void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
15919          const camera_metadata_t &resultMetadata)
15920  {
15921      if (result == nullptr) {
15922          ALOGE("%s: result is nullptr.", __FUNCTION__);
15923          return;
15924      }
15925  
15926      // Find the pending HDR+ request.
15927      HdrPlusPendingRequest pendingRequest;
15928      {
15929          Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15930          auto req = mHdrPlusPendingRequests.find(result->requestId);
15931          pendingRequest = req->second;
15932      }
15933  
15934      // Update the result metadata with the settings of the HDR+ still capture request because
15935      // the result metadata belongs to a ZSL buffer.
15936      CameraMetadata metadata;
15937      metadata = &resultMetadata;
15938      updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15939      camera_metadata_t* updatedResultMetadata = metadata.release();
15940  
15941      uint32_t halSnapshotStreamId = 0;
15942      if (mPictureChannel != nullptr) {
15943          halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15944      }
15945  
15946      auto halMetadata = std::make_shared<metadata_buffer_t>();
15947      clear_metadata_buffer(halMetadata.get());
15948  
15949      // Convert updated result metadata to HAL metadata.
15950      status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15951              halSnapshotStreamId, /*minFrameDuration*/0);
15952      if (res != 0) {
15953          ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15954      }
15955  
15956      for (auto &outputBuffer : result->outputBuffers) {
15957          uint32_t streamId = outputBuffer.streamId;
15958  
15959          // Find the framework output buffer in the pending request.
15960          auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15961          if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15962              ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15963                      streamId);
15964              continue;
15965          }
15966  
15967          camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15968  
15969          // Find the channel for the output buffer.
15970          QCamera3ProcessingChannel *channel =
15971                  (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15972  
15973          // Find the output buffer def.
15974          auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15975          if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15976              ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15977              continue;
15978          }
15979  
15980          std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15981  
15982          // Check whether to dump the buffer.
15983          if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15984                  frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15985              // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15986              char prop[PROPERTY_VALUE_MAX];
15987              property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15988              bool dumpYuvOutput = atoi(prop);
15989  
15990              if (dumpYuvOutput) {
15991                  // Dump yuv buffer to a ppm file.
15992                  pbcamera::StreamConfiguration outputConfig;
15993                  status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15994                          channel, /*stream index*/0);
15995                  if (rc == OK) {
15996                      char buf[FILENAME_MAX] = {};
15997                      snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15998                              result->requestId, streamId,
15999                              outputConfig.image.width, outputConfig.image.height);
16000  
16001                      hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
16002                  } else {
16003                      LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
16004                              "%s (%d).", __FUNCTION__, strerror(-rc), rc);
16005                  }
16006              }
16007          }
16008  
16009          if (channel == mPictureChannel) {
16010              // Return the buffer to pic channel for encoding.
16011              mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
16012                      frameworkOutputBuffer->buffer, result->requestId,
16013                      halMetadata);
16014          } else {
16015              // Return the buffer to camera framework.
16016              pthread_mutex_lock(&mMutex);
16017              handleBufferWithLock(frameworkOutputBuffer, result->requestId);
16018              channel->unregisterBuffer(outputBufferDef.get());
16019              pthread_mutex_unlock(&mMutex);
16020          }
16021      }
16022  
16023      // Send HDR+ metadata to framework.
16024      {
16025          pthread_mutex_lock(&mMutex);
16026  
16027          // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
16028          handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
16029          pthread_mutex_unlock(&mMutex);
16030      }
16031  
16032      // Remove the HDR+ pending request.
16033      {
16034          Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16035          auto req = mHdrPlusPendingRequests.find(result->requestId);
16036          mHdrPlusPendingRequests.erase(req);
16037      }
16038  }
16039  
/**
 * HDR+ client callback: an HDR+ capture request failed.
 *
 * Returns all output buffers associated with the failed request to their
 * channels, removes the pending HDR+ request, sends a CAMERA3_MSG_ERROR_REQUEST
 * notification plus error-status buffers to the framework for every pending
 * buffer of that frame, clears the pending shutter, and drops the pending HAL
 * request.
 *
 * @param failedResult the failed HDR+ result; ignored (with an error log) if
 *                     nullptr.
 */
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    // Return every output buffer of the failed request to its channel.
    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the channel
        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out request errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            // Send out request error event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
            // Route through the dispatcher so buffers are still delivered to
            // the framework in frame-number order.
            mOutputBufferDispatcher.markBufferReady(pendingBuffers->frame_number, streamBuffer);
        }

        // No shutter will ever arrive for a failed request.
        mShutterDispatcher.clear(pendingBuffers->frame_number);



        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}
16157  
/**
 * Reads sensor calibration from the persisted factory calibration XML and
 * converts it to the layout used by the camera metadata.
 *
 * The file is a "rig" document: camera model index 0 holds the intrinsics
 * (model type "calibu_fu_fv_u0_v0_k1_k2_k3" with params
 * [fu, fv, u0, v0, k1, k2, k3]) and extrinsic_calibration with frame_B_id 0
 * holds the pose (quaternion A_q_B and translation A_p_B).
 *
 * @param activeArrayWidth width of the sensor active array; intrinsics are
 *                         scaled from the calibration model width to this.
 * @param poseRotation     out: 4-element pose quaternion.
 * @param poseTranslation  out: 3-element pose translation.
 * @param cameraIntrinsics out: [f_x, f_y, c_x, c_y, s] (s is always 0).
 * @param radialDistortion out: [k_1, k_2, k_3, k_4, k_5] (k_4, k_5 always 0).
 * @return true on success; false (with an error log) on any missing or
 *         malformed element.
 */
bool QCamera3HardwareInterface::readSensorCalibration(
        int activeArrayWidth,
        float poseRotation[4], float poseTranslation[3],
        float cameraIntrinsics[5], float radialDistortion[5]) {

    const char* calibrationPath = "/persist/sensors/calibration/calibration.xml";

    using namespace tinyxml2;

    XMLDocument calibrationXml;
    XMLError err = calibrationXml.LoadFile(calibrationPath);
    if (err != XML_SUCCESS) {
        ALOGE("Unable to load calibration file '%s'. Error: %s",
                calibrationPath, XMLDocument::ErrorIDToName(err));
        return false;
    }
    XMLElement *rig = calibrationXml.FirstChildElement("rig");
    if (rig == nullptr) {
        ALOGE("No 'rig' in calibration file");
        return false;
    }
    // Walk the <camera> elements looking for the camera_model with index 0.
    XMLElement *cam = rig->FirstChildElement("camera");
    XMLElement *camModel = nullptr;
    while (cam != nullptr) {
        camModel = cam->FirstChildElement("camera_model");
        if (camModel == nullptr) {
            ALOGE("No 'camera_model' in calibration file");
            return false;
        }
        int modelIndex = camModel->IntAttribute("index", -1);
        // Model index "0" has the calibration we need
        if (modelIndex == 0) {
            break;
        }
        cam = cam->NextSiblingElement("camera");
    }
    if (cam == nullptr) {
        ALOGE("No 'camera' in calibration file");
        return false;
    }
    // Only the calibu fu/fv/u0/v0/k1/k2/k3 model is understood here.
    const char *modelType = camModel->Attribute("type");
    if (modelType == nullptr || strcmp(modelType,"calibu_fu_fv_u0_v0_k1_k2_k3")) {
        ALOGE("Camera model is unknown type %s",
                modelType ? modelType : "NULL");
        return false;
    }
    XMLElement *modelWidth = camModel->FirstChildElement("width");
    if (modelWidth == nullptr || modelWidth->GetText() == nullptr) {
        ALOGE("No camera model width in calibration file");
        return false;
    }
    int width = atoi(modelWidth->GetText());
    XMLElement *modelHeight = camModel->FirstChildElement("height");
    if (modelHeight == nullptr || modelHeight->GetText() == nullptr) {
        ALOGE("No camera model height in calibration file");
        return false;
    }
    int height = atoi(modelHeight->GetText());
    // width is used as a divisor below, so both dimensions must be positive.
    if (width <= 0 || height <= 0) {
        ALOGE("Bad model width or height in calibration file: %d x %d", width, height);
        return false;
    }
    ALOGI("Width: %d, Height: %d", width, height);

    XMLElement *modelParams = camModel->FirstChildElement("params");
    if (modelParams == nullptr) {
        ALOGE("No camera model params in calibration file");
        return false;
    }
    const char* paramText = modelParams->GetText();
    if (paramText == nullptr) {
        ALOGE("No parameters in params element in calibration file");
        return false;
    }
    ALOGI("Parameters: %s", paramText);

    // Parameter string is of the form "[ float; float; float ...]"
    float params[7];
    bool success = parseStringArray(paramText, params, 7);
    if (!success) {
        ALOGE("Malformed camera parameter string in calibration file");
        return false;
    }

    // Walk the <extrinsic_calibration> elements looking for frame_B_id 0.
    XMLElement *extCalib = rig->FirstChildElement("extrinsic_calibration");
    while (extCalib != nullptr) {
        int id = extCalib->IntAttribute("frame_B_id", -1);
        if (id == 0) {
            break;
        }
        extCalib = extCalib->NextSiblingElement("extrinsic_calibration");
    }
    if (extCalib == nullptr) {
        ALOGE("No 'extrinsic_calibration' in calibration file");
        return false;
    }

    XMLElement *q = extCalib->FirstChildElement("A_q_B");
    if (q == nullptr || q->GetText() == nullptr) {
        ALOGE("No extrinsic quarternion in calibration file");
        return false;
    }
    float rotation[4];
    success = parseStringArray(q->GetText(), rotation, 4);
    if (!success) {
        ALOGE("Malformed extrinsic quarternion string in calibration file");
        return false;
    }

    XMLElement *p = extCalib->FirstChildElement("A_p_B");
    if (p == nullptr || p->GetText() == nullptr) {
        ALOGE("No extrinsic translation in calibration file");
        return false;
    }
    float position[3];
    success = parseStringArray(p->GetText(), position, 3);
    if (!success) {
        ALOGE("Malformed extrinsic position string in calibration file");
        return false;
    }

    // Map from width x height to active array
    float scaleFactor = static_cast<float>(activeArrayWidth) / width;

    cameraIntrinsics[0] = params[0] * scaleFactor; // fu -> f_x
    cameraIntrinsics[1] = params[1] * scaleFactor; // fv -> f_y
    cameraIntrinsics[2] = params[2] * scaleFactor; // u0 -> c_x
    cameraIntrinsics[3] = params[3] * scaleFactor; // v0 -> c_y
    cameraIntrinsics[4] = 0; // s = 0

    radialDistortion[0] = params[4]; // k1 -> k_1
    radialDistortion[1] = params[5]; // k2 -> k_2
    radialDistortion[2] = params[6]; // k3 -> k_3
    radialDistortion[3] = 0; // k_4 = 0
    radialDistortion[4] = 0; // k_5 = 0

    for (int i = 0; i < 4; i++) {
        poseRotation[i] = rotation[i];
    }
    for (int i = 0; i < 3; i++) {
        poseTranslation[i] = position[i];
    }

    ALOGI("Intrinsics: %f, %f, %f, %f, %f", cameraIntrinsics[0],
            cameraIntrinsics[1], cameraIntrinsics[2],
            cameraIntrinsics[3], cameraIntrinsics[4]);
    ALOGI("Distortion: %f, %f, %f, %f, %f",
            radialDistortion[0], radialDistortion[1], radialDistortion[2], radialDistortion[3],
            radialDistortion[4]);
    ALOGI("Pose rotation: %f, %f, %f, %f",
            poseRotation[0], poseRotation[1], poseRotation[2], poseRotation[3]);
    ALOGI("Pose translation: %f, %f, %f",
            poseTranslation[0], poseTranslation[1], poseTranslation[2]);

    return true;
}
16314  
parseStringArray(const char * str,float * dest,int count)16315  bool QCamera3HardwareInterface::parseStringArray(const char *str, float *dest, int count) {
16316      size_t idx = 0;
16317      size_t len = strlen(str);
16318      for (; idx < len; idx++) {
16319          if (str[idx] == '[') break;
16320      }
16321      const char *startParam = str + idx + 1;
16322      if (startParam >= str + len) {
16323          ALOGE("Malformed array: %s", str);
16324          return false;
16325      }
16326      char *endParam = nullptr;
16327      for (int i = 0; i < count; i++) {
16328          dest[i] = strtod(startParam, &endParam);
16329          if (startParam == endParam) {
16330              ALOGE("Malformed array, index %d: %s", i, str);
16331              return false;
16332          }
16333          startParam = endParam + 1;
16334          if (startParam >= str + len) {
16335              ALOGE("Malformed array, index %d: %s", i, str);
16336              return false;
16337          }
16338      }
16339      return true;
16340  }
16341  
// Constructs a shutter dispatcher bound to the HWI instance whose
// orchestrateNotify() delivers shutter messages to the framework.
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}
16344  
expectShutter(uint32_t frameNumber,bool isReprocess)16345  void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
16346  {
16347      std::lock_guard<std::mutex> lock(mLock);
16348  
16349      if (isReprocess) {
16350          mReprocessShutters.emplace(frameNumber, Shutter());
16351      } else {
16352          mShutters.emplace(frameNumber, Shutter());
16353      }
16354  }
16355  
/**
 * Marks a frame's shutter ready and delivers ready shutters to the framework
 * in frame-number order.
 *
 * Shutters are held back until every earlier frame's shutter in the same map
 * (regular or reprocess) is also ready, preserving in-order delivery.
 *
 * @param frameNumber frame number whose shutter is ready.
 * @param timestamp   sensor timestamp to report; ignored if this shutter was
 *                    already marked ready.
 */
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry in either the regular or the reprocess map.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    if (shutter->second.ready) {
        // If shutter is already ready, don't update timestamp again.
        return;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send out shutters until the one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}
16401  
// Removes any pending shutter entry for the given frame number from both the
// regular and reprocess maps (e.g. when the request failed and no shutter
// will arrive).
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}
16408  
clear()16409  void ShutterDispatcher::clear()
16410  {
16411      std::lock_guard<std::mutex> lock(mLock);
16412  
16413      // Log errors for stale shutters.
16414      for (auto &shutter : mShutters) {
16415          ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
16416              __FUNCTION__, shutter.first, shutter.second.ready,
16417              shutter.second.timestamp);
16418      }
16419  
16420      // Log errors for stale reprocess shutters.
16421      for (auto &shutter : mReprocessShutters) {
16422          ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
16423              __FUNCTION__, shutter.first, shutter.second.ready,
16424              shutter.second.timestamp);
16425      }
16426  
16427      mShutters.clear();
16428      mReprocessShutters.clear();
16429  }
16430  
// Constructs an output-buffer dispatcher bound to the HWI instance whose
// orchestrateResult() delivers buffer results to the framework.
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}
16433  
configureStreams(camera3_stream_configuration_t * streamList)16434  status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
16435  {
16436      std::lock_guard<std::mutex> lock(mLock);
16437      mStreamBuffers.clear();
16438      if (!streamList) {
16439          ALOGE("%s: streamList is nullptr.", __FUNCTION__);
16440          return -EINVAL;
16441      }
16442  
16443      // Create a "frame-number -> buffer" map for each stream.
16444      for (uint32_t i = 0; i < streamList->num_streams; i++) {
16445          mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
16446      }
16447  
16448      return OK;
16449  }
16450  
expectBuffer(uint32_t frameNumber,camera3_stream_t * stream)16451  status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
16452  {
16453      std::lock_guard<std::mutex> lock(mLock);
16454  
16455      // Find the "frame-number -> buffer" map for the stream.
16456      auto buffers = mStreamBuffers.find(stream);
16457      if (buffers == mStreamBuffers.end()) {
16458          ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
16459          return -EINVAL;
16460      }
16461  
16462      // Create an unready buffer for this frame number.
16463      buffers->second.emplace(frameNumber, Buffer());
16464      return OK;
16465  }
16466  
/**
 * Marks a frame's buffer ready on its stream and delivers ready buffers to
 * the framework in frame-number order.
 *
 * Buffers are held back until every earlier frame's buffer on the same stream
 * is also ready, preserving per-stream in-order delivery.
 *
 * @param frameNumber frame number the buffer belongs to.
 * @param buffer      the filled (or error-status) stream buffer to deliver.
 */
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the frame number -> buffer map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send out buffers until the one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        // Each buffer is sent as its own single-buffer result.
        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out result with buffer errors.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}
16508  
clear(bool clearConfiguredStreams)16509  void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
16510  {
16511      std::lock_guard<std::mutex> lock(mLock);
16512  
16513      // Log errors for stale buffers.
16514      for (auto &buffers : mStreamBuffers) {
16515          for (auto &buffer : buffers.second) {
16516              ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
16517                  __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
16518          }
16519          buffers.second.clear();
16520      }
16521  
16522      if (clearConfiguredStreams) {
16523          mStreamBuffers.clear();
16524      }
16525  }
16526  
16527  }; //end namespace qcamera
16528