/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define ATRACE_TAG ATRACE_TAG_CAMERA
#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS
#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <stdint.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <utils/Trace.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"
#include "QCamera3VendorTags.h"

using namespace android;

namespace qcamera {

#define MAX(a, b) ((a) > (b) ? (a) : (b))

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

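/* Request pipeline tuning constants used throughout this file:
 * EMPTY_PIPELINE_DELAY is the expected request-to-result latency, in frames,
 * through an empty pipeline, and PARTIAL_RESULT_COUNT is the number of
 * partial metadata results the HAL delivers per capture. */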
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3

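/* Per-sensor capability tables and cached static metadata, shared across all
 * camera device instances opened by this module. gCamHal3LogLevel controls
 * the verbosity of the HAL debug logging (refreshed in getLogLevel()). */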
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
volatile uint32_t gCamHal3LogLevel = 1;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};

/* Not every Android enum value has a HAL mapping, so some are not listed.
 * The order of this list also matters: when mapping from HAL to Android, the
 * lookup traverses from lower to higher index, so for HAL values that map to
 * several Android values the first entry found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

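/* Device ops table handed back to the camera framework. The
 * register_stream_buffers and get_metadata_vendor_tag_ops entries are NULL
 * because both are deprecated for CAMERA_DEVICE_API_VERSION_3_2 devices;
 * vendor tags are published through the module-level vendor tag ops instead
 * (see QCamera3VendorTags). */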
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
                        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mRawDumpChannel(NULL),
      mFirstRequest(false),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      mEisEnable(0),
      mLoopBackResult(NULL),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mMetaFrameCount(0),
      mCallbacks(callbacks),
      mCaptureIntent(0)
{
    getLogLevel();
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif

    char prop[PROPERTY_VALUE_MAX];
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */


    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();
    mPendingReprocessResultList.clear();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    CDBG("%s: X", __func__);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                          mm_camera_event_t *evt,
                                          void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    camera3_notify_msg_t notify_msg;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                ALOGE("%s: Fatal, camera daemon died", __func__);
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = 0;
                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                CDBG("%s: HAL got request pull from Daemon", __func__);
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                        evt->server_event_type);
                break;
        }
    } else {
        ALOGE("%s: NULL user_data/evt", __func__);
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mCameraOpened) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else
        *hw_device = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == 0) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=1");
            }
        }
    }
#endif
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;

    ATRACE_CALL();
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        ALOGE("%s: Error, failed to register event callback", __func__);
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CALL();
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParameters failed %d", __func__, rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that the requested stream dimensions are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
    uint8_t jpeg_sizes_cnt = 0;

    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++){
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        /*
        * Sizes are different for each type of stream format; check against
        * the appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (int i = 0;
                    i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
                if (gCamCapability[mCameraId]->raw_dim[i].width
                        == (int32_t) newStream->width
                    && gCamCapability[mCameraId]->raw_dim[i].height
                        == (int32_t) newStream->height) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            /* Generate JPEG sizes table */
            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
                    gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    available_processed_sizes);
            jpeg_sizes_cnt = filterJpegSizes(
                    available_jpeg_sizes,
                    available_processed_sizes,
                    (gCamCapability[mCameraId]->picture_sizes_tbl_cnt) * 2,
                    MAX_SIZES_CNT * 2,
                    gCamCapability[mCameraId]->active_array_size,
                    gCamCapability[mCameraId]->max_downscale_factor);

            /* Verify set size against generated sizes table */
            for (int i = 0;i < jpeg_sizes_cnt/2; i++) {
                if ((int32_t)(newStream->width) == available_jpeg_sizes[i*2] &&
                    (int32_t)(newStream->height) == available_jpeg_sizes[i*2+1]) {
                    sizeFound = true;
                    break;
                }
            }
            break;


        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            /* ZSL stream will be full active array size; validate that */
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                if ((int32_t)(newStream->width) ==
                    gCamCapability[mCameraId]->active_array_size.width
                    && (int32_t)(newStream->height)  ==
                    gCamCapability[mCameraId]->active_array_size.height) {
                    sizeFound = true;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always has full active array size
                 * but it is not clear from spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping this logic lenient at the moment.
                 */
            }

            /* Non-ZSL streams still need to conform to advertised sizes */
            for (int i = 0;
                i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
                if ((int32_t)(newStream->width) ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width
                    && (int32_t)(newStream->height) ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height){
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            ALOGE("%s: Error: Unsupported size of %d x %d requested for stream"
                  " type:%d", __func__, newStream->width, newStream->height,
                  newStream->format);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(camera3_stream_configuration_t *streamList)
{
    uint32_t i;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0;i < streamList->num_streams;i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}


/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    ATRACE_CALL();
    int rc = 0;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* First invalidate all the streams in mStreamInfo;
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }

    if (mSupportChannel)
        mSupportChannel->stop();
    if (mMetadataChannel) {
        /* If mStreamInfo is not empty, a metadata stream exists */
        mMetadataChannel->stop();
    }

    pthread_mutex_lock(&mMutex);

    /* Check whether we have video stream */
    m_bIs4KVideo = false;
    m_bIsVideo = false;
    bool isZsl = false;
    size_t videoWidth = 0;
    size_t videoHeight = 0;
    size_t rawStreamCnt = 0;
    size_t stallStreamCnt = 0;
    size_t processedStreamCnt = 0;
    // Number of streams on ISP encoder path
    size_t numStreamsOnEncoder = 0;
    cam_dimension_t maxViewfinderSize;
    bool bJpegExceeds4K = false;
    bool bUseCommonFeatureMask = false;
    uint32_t commonFeatureMask = 0;
    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;

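    /* First pass over the requested streams: classify each one (stall, raw,
     * processed), detect the ZSL and 4K video use cases, and build up the
     * common post-processing feature mask for streams larger than the
     * maximum viewfinder size. */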
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        CDBG_HIGH("%s: stream[%d] type = %d, format = %d, width = %d, height = %d",
                __func__, i, newStream->stream_type, newStream->format,
                newStream->width, newStream->height);
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
            isZsl = true;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            if (newStream->width > VIDEO_4K_WIDTH ||
                    newStream->height > VIDEO_4K_HEIGHT)
                bJpegExceeds4K = true;
        }

        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
            m_bIsVideo = true;

            if ((VIDEO_4K_WIDTH <= newStream->width) &&
                    (VIDEO_4K_HEIGHT <= newStream->height)) {
                videoWidth = newStream->width;
                videoHeight = newStream->height;
                m_bIs4KVideo = true;
            }
        }
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
                stallStreamCnt++;
                if (newStream->width > (uint32_t)maxViewfinderSize.width ||
                        newStream->height > (uint32_t)maxViewfinderSize.height) {
                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
                    numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
                rawStreamCnt++;
                break;
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                processedStreamCnt++;
                if (newStream->width > (uint32_t)maxViewfinderSize.width ||
                        newStream->height > (uint32_t)maxViewfinderSize.height) {
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
                    } else {
                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
                    }
                    numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            default:
                processedStreamCnt++;
                if (newStream->width > (uint32_t)maxViewfinderSize.width ||
                        newStream->height > (uint32_t)maxViewfinderSize.height) {
                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
                    numStreamsOnEncoder++;
                }
                break;
            }

        }
    }

    /* Check if num_streams is sane */
    if (stallStreamCnt > MAX_STALLING_STREAMS ||
            rawStreamCnt > MAX_RAW_STREAMS ||
            processedStreamCnt > MAX_PROCESSED_STREAMS) {
        ALOGE("%s: Invalid stream config: stall: %d, raw: %d, processed %d",
                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check whether we have zsl stream or 4k video case */
    if (isZsl && m_bIsVideo) {
        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check if stream sizes are sane */
    if (numStreamsOnEncoder > 2) {
        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
                __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    } else if (1 < numStreamsOnEncoder){
        bUseCommonFeatureMask = true;
        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
                __func__);
    }
    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (m_bIs4KVideo && bJpegExceeds4K) {
        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
                __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    rc = validateStreamDimensions(streamList);
    if (rc != NO_ERROR) {
        ALOGE("%s: Invalid stream configuration requested!", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

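    /* Second pass: reconcile the requested streams with the existing
     * mStreamInfo entries. Streams that are being re-used have their old
     * channel deleted and are marked VALID; new streams get a fresh
     * stream_info_t entry. The input and JPEG (BLOB) streams, if any, are
     * also remembered here. */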
    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        CDBG_HIGH("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in mStreamInfo, validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                delete channel;
                (*it)->status = VALID;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    cleanAndSortStreamInfo();
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize(IS_TYPE_NONE, mCaptureIntent);
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }


    if (isSupportChannelNeeded(streamList)) {
        mSupportChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mCameraHandle->ops,
                &gCamCapability[mCameraId]->padding_info,
                CAM_QCOM_FEATURE_NONE,
                this);
        if (!mSupportChannel) {
            ALOGE("%s: dummy channel cannot be created", __func__);
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }

    bool isRawStreamRequested = false;
    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg stream size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
            stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
                 stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
              stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
              if (m_bIs4KVideo && !isZsl) {
                  stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
              } else {
                  if (bUseCommonFeatureMask &&
                          (newStream->width > (uint32_t)maxViewfinderSize.width ||
                                  newStream->height > (uint32_t)maxViewfinderSize.height)) {
                      stream_config_info.postprocess_mask[i] = commonFeatureMask;
                  } else {
                      stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
                  }
              }
              if (m_bIs4KVideo) {
                  stream_config_info.stream_sizes[i].width = videoWidth;
                  stream_config_info.stream_sizes[i].height = videoHeight;
              }
              break;
           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
           case HAL_PIXEL_FORMAT_RAW16:
           case HAL_PIXEL_FORMAT_RAW10:
              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
              isRawStreamRequested = true;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                    jpegStream) {
                QCamera3Channel *channel = NULL;
                newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                        mCameraHandle->ops, captureResultCb,
                        &gCamCapability[mCameraId]->padding_info,
                        this,
                        newStream,
                        (cam_stream_type_t) stream_config_info.type[i],
                        stream_config_info.postprocess_mask[i],
                        jpegStream->width, jpegStream->height);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = channel;
            } else if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
                QCamera3Channel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t) stream_config_info.type[i],
                            stream_config_info.postprocess_mask[i]);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                case HAL_PIXEL_FORMAT_RAW10:
                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this, newStream, CAM_QCOM_FEATURE_NONE,
                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        ALOGE("%s: allocation of raw channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = (QCamera3Channel*)mRawChannel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            stream_config_info.postprocess_mask[i],
                            m_bIs4KVideo, mMetadataChannel);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }

            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3Channel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }

    if (mPictureChannel && m_bIs4KVideo) {
        mPictureChannel->overrideYuvSize(videoWidth, videoHeight);
    }

    //RAW DUMP channel
    if (mEnableRawDump && isRawStreamRequested == false){
        cam_dimension_t rawDumpSize;
        rawDumpSize = getMaxRawSize(mCameraId);
        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
                                  mCameraHandle->ops,
                                  rawDumpSize,
                                  &gCamCapability[mCameraId]->padding_info,
                                  this, CAM_QCOM_FEATURE_NONE);
        if (!mRawDumpChannel) {
            ALOGE("%s: Raw Dump channel cannot be created", __func__);
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }


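    /* Besides the framework-visible streams, append the internal support
     * (dummy callback) stream and the raw dump stream, when present, to the
     * stream configuration that is sent to the camera backend. */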
    stream_config_info.num_streams = streamList->num_streams;
    if (mSupportChannel) {
        stream_config_info.stream_sizes[stream_config_info.num_streams] =
                QCamera3SupportChannel::kDim;
        stream_config_info.type[stream_config_info.num_streams] =
                CAM_STREAM_TYPE_CALLBACK;
        stream_config_info.num_streams++;
    }

    if (mRawDumpChannel) {
        cam_dimension_t rawSize;
        rawSize = getMaxRawSize(mCameraId);
        stream_config_info.stream_sizes[stream_config_info.num_streams] =
                rawSize;
        stream_config_info.type[stream_config_info.num_streams] =
                CAM_STREAM_TYPE_RAW;
        stream_config_info.num_streams++;
    }

    // settings/parameters don't carry over for new configureStreams
    int32_t hal_version = CAM_HAL_V3;
    memset(mParameters, 0, sizeof(metadata_buffer_t));

    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
            sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
            sizeof(cam_stream_size_info_t), &stream_config_info);

    int32_t tintless_value = 1;
    AddSetParmEntryToBatch(mParameters,CAM_INTF_PARM_TINTLESS,
                sizeof(tintless_value), &tintless_value);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    /* Initialize mPendingRequestsList and mPendingBuffersMap */
    mPendingRequestsList.clear();
    mPendingFrameDropList.clear();
    // Initialize/Reset the pending buffers list
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();

    mFirstRequest = true;

    //Get min frame duration for this stream configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    if (request->settings == NULL && mFirstRequest) {
        /*settings cannot be null for the first request*/
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->input_buffer != NULL) {
        b = request->input_buffer;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
                static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (*(b->buffer) == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : deriveMinFrameDuration
 *
1354  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
1355  *              on currently configured streams.
1356  *
1357  * PARAMETERS : NONE
1358  *
1359  * RETURN     : NONE
1360  *
1361  *==========================================================================*/
1362 void QCamera3HardwareInterface::deriveMinFrameDuration()
1363 {
1364     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1365 
1366     maxJpegDim = 0;
1367     maxProcessedDim = 0;
1368     maxRawDim = 0;
1369 
1370     // Figure out maximum jpeg, processed, and raw dimensions
1371     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1372         it != mStreamInfo.end(); it++) {
1373 
1374         // Input stream doesn't have valid stream_type
1375         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1376             continue;
1377 
1378         int32_t dimension = (*it)->stream->width * (*it)->stream->height;
1379         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1380             if (dimension > maxJpegDim)
1381                 maxJpegDim = dimension;
1382         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1383                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1384                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1385             if (dimension > maxRawDim)
1386                 maxRawDim = dimension;
1387         } else {
1388             if (dimension > maxProcessedDim)
1389                 maxProcessedDim = dimension;
1390         }
1391     }
1392 
1393     //Assume all jpeg dimensions are in processed dimensions.
1394     if (maxJpegDim > maxProcessedDim)
1395         maxProcessedDim = maxJpegDim;
1396     //Find the smallest raw dimension that is greater or equal to jpeg dimension
1397     if (maxProcessedDim > maxRawDim) {
1398         maxRawDim = INT32_MAX;
1399         for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1400             i++) {
1401 
1402             int32_t dimension =
1403                 gCamCapability[mCameraId]->raw_dim[i].width *
1404                 gCamCapability[mCameraId]->raw_dim[i].height;
1405 
1406             if (dimension >= maxProcessedDim && dimension < maxRawDim)
1407                 maxRawDim = dimension;
1408         }
1409     }
1410 
1411     //Find minimum durations for processed, jpeg, and raw
1412     for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1413             i++) {
1414         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
1415                 gCamCapability[mCameraId]->raw_dim[i].height) {
1416             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
1417             break;
1418         }
1419     }
1420     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1421         if (maxProcessedDim ==
1422             gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1423             gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1424             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1425             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1426             break;
1427         }
1428     }
1429 }
1430 
1431 /*===========================================================================
1432  * FUNCTION   : getMinFrameDuration
1433  *
1434  * DESCRIPTION: get the minimum frame duration based on the per-stream minimum
1435  *              frame durations and the current request configuration.
1436  *
1437  * PARAMETERS : @request: request sent by the framework
1438  *
1439  * RETURN     : minimum frame duration for a particular request
1440  *
1441  *==========================================================================*/
1442 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1443 {
1444     bool hasJpegStream = false;
1445     bool hasRawStream = false;
1446     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1447         const camera3_stream_t *stream = request->output_buffers[i].stream;
1448         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1449             hasJpegStream = true;
1450         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1451                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1452                 stream->format == HAL_PIXEL_FORMAT_RAW16)
1453             hasRawStream = true;
1454     }
1455 
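    // Without a BLOB (JPEG) output the request is bounded only by the raw/processed
    // minimum durations; with one, the JPEG minimum duration is also taken into account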
1456     if (!hasJpegStream)
1457         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1458     else
1459         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1460 }
1461 
1462 /*===========================================================================
1463  * FUNCTION   : handlePendingReprocResults
1464  *
1465  * DESCRIPTION: check and notify on any pending reprocess results
1466  *
1467  * PARAMETERS :
1468  *   @frame_number   : Pending request frame number
1469  *
1470  * RETURN     : int32_t type of status
1471  *              NO_ERROR  -- success
1472  *              non-zero failure code
1473  *==========================================================================*/
1474 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
1475 {
1476     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
1477             j != mPendingReprocessResultList.end(); j++) {
1478         if (j->frame_number == frame_number) {
1479             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
1480 
1481             CDBG("%s: Delayed reprocess notify %d", __func__,
1482                     frame_number);
1483 
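            // Find the matching entry in the pending request list so the cached
            // reprocess buffer can be returned together with its settings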
1484             for (List<PendingRequestInfo>::iterator k = mPendingRequestsList.begin();
1485                 k != mPendingRequestsList.end(); k++) {
1486 
1487                 if (k->frame_number == j->frame_number) {
1488                     CDBG("%s: Found reprocess frame number %d in pending reprocess List "
1489                             "Take it out!!", __func__,
1490                             k->frame_number);
1491 
1492                     camera3_capture_result result;
1493                     memset(&result, 0, sizeof(camera3_capture_result));
1494                     result.frame_number = frame_number;
1495                     result.num_output_buffers = 1;
1496                     result.output_buffers =  &j->buffer;
1497                     result.input_buffer = k->input_buffer;
1498                     result.result = k->settings;
1499                     result.partial_result = PARTIAL_RESULT_COUNT;
1500                     mCallbackOps->process_capture_result(mCallbackOps, &result);
1501 
1502                     mPendingRequestsList.erase(k);
1503                     mPendingRequest--;
1504                     break;
1505                 }
1506             }
1507             mPendingReprocessResultList.erase(j);
1508             break;
1509         }
1510     }
1511     return NO_ERROR;
1512 }
1513 
1514 /*===========================================================================
1515  * FUNCTION   : handleMetadataWithLock
1516  *
1517  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1518  *
1519  * PARAMETERS : @metadata_buf: metadata buffer
1520  *
1521  * RETURN     :
1522  *
1523  *==========================================================================*/
1524 void QCamera3HardwareInterface::handleMetadataWithLock(
1525     mm_camera_super_buf_t *metadata_buf)
1526 {
1527     ATRACE_CALL();
1528     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1529     int32_t frame_number_valid = *(int32_t *)
1530         POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1531     uint32_t frame_number = *(uint32_t *)
1532         POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
1533     nsecs_t capture_time = *(int64_t *)
1534         POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1535     cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1536         POINTER_OF_META(CAM_INTF_META_FRAME_DROPPED, metadata);
1537     camera3_notify_msg_t notify_msg;
1538 
1539     int32_t urgent_frame_number_valid = *(int32_t *)
1540         POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
1541     uint32_t urgent_frame_number = *(uint32_t *)
1542         POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
1543 
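    // Urgent (partial) metadata arrives ahead of the full result and carries the
    // 3A state; deliver it as a partial capture result for the matching pending request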
1544     if (urgent_frame_number_valid) {
1545         CDBG("%s: valid urgent frame_number = %d, capture_time = %lld",
1546           __func__, urgent_frame_number, capture_time);
1547 
1548         //Received an urgent frame number, handle it
1549         //using partial results
1550         for (List<PendingRequestInfo>::iterator i =
1551             mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
1552             CDBG("%s: Iterator Frame = %d urgent frame = %d",
1553                 __func__, i->frame_number, urgent_frame_number);
1554 
1555             if (i->frame_number < urgent_frame_number &&
1556                 i->partial_result_cnt == 0) {
1557                 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
1558                     __func__, i->frame_number);
1559             }
1560 
1561             if (i->frame_number == urgent_frame_number &&
1562                      i->bUrgentReceived == 0) {
1563 
1564                 camera3_capture_result_t result;
1565                 memset(&result, 0, sizeof(camera3_capture_result_t));
1566 
1567                 i->partial_result_cnt++;
1568                 i->bUrgentReceived = 1;
1569                 // Extract 3A metadata
1570                 result.result =
1571                     translateCbUrgentMetadataToResultMetadata(metadata);
1572                 // Populate metadata result
1573                 result.frame_number = urgent_frame_number;
1574                 result.num_output_buffers = 0;
1575                 result.output_buffers = NULL;
1576                 result.partial_result = i->partial_result_cnt;
1577 
1578                 mCallbackOps->process_capture_result(mCallbackOps, &result);
1579                 CDBG("%s: urgent frame_number = %d, capture_time = %lld",
1580                      __func__, result.frame_number, capture_time);
1581                 free_camera_metadata((camera_metadata_t *)result.result);
1582                 break;
1583             }
1584         }
1585     }
1586 
1587     if (!frame_number_valid) {
1588         CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
1589         mMetadataChannel->bufDone(metadata_buf);
1590         free(metadata_buf);
1591         goto done_metadata;
1592     }
1593     CDBG("%s: valid frame_number = %d, capture_time = %lld", __func__,
1594             frame_number, capture_time);
1595 
1596     // Go through the pending requests info and send shutter/results to frameworks
1597     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1598         i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1599         camera3_capture_result_t result;
1600         memset(&result, 0, sizeof(camera3_capture_result_t));
1601 
1602         CDBG("%s: frame_number in the list is %d", __func__, i->frame_number);
1603         i->partial_result_cnt++;
1604         result.partial_result = i->partial_result_cnt;
1605 
1606         // Flush out all entries with less or equal frame numbers.
1607         mPendingRequest--;
1608 
1609         // Check whether any stream buffer corresponding to this is dropped or not
1610         // If dropped, then send the ERROR_BUFFER for the corresponding stream
1611         if (cam_frame_drop.frame_dropped) {
1612             camera3_notify_msg_t notify_msg;
1613             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1614                     j != i->buffers.end(); j++) {
1615                 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1616                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1617                 for (uint32_t k = 0; k < cam_frame_drop.cam_stream_ID.num_streams; k++) {
1618                    if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
1619                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1620                        CDBG("%s: Start of reporting error frame#=%d, streamID=%d",
1621                               __func__, i->frame_number, streamID);
1622                        notify_msg.type = CAMERA3_MSG_ERROR;
1623                        notify_msg.message.error.frame_number = i->frame_number;
1624                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1625                        notify_msg.message.error.error_stream = j->stream;
1626                        mCallbackOps->notify(mCallbackOps, &notify_msg);
1627                        CDBG("%s: End of reporting error frame#=%d, streamID=%d",
1628                               __func__, i->frame_number, streamID);
1629                        PendingFrameDropInfo PendingFrameDrop;
1630                        PendingFrameDrop.frame_number=i->frame_number;
1631                        PendingFrameDrop.stream_ID = streamID;
1632                        // Add the Frame drop info to mPendingFrameDropList
1633                        mPendingFrameDropList.push_back(PendingFrameDrop);
1634                    }
1635                 }
1636             }
1637         }
1638 
1639         // For requests whose metadata was dropped, send dummy metadata along with
1640         // the already filled buffers; for the current frame, send the translated metadata
1641         if (i->frame_number < frame_number) {
1642             camera3_notify_msg_t notify_msg;
1643             notify_msg.type = CAMERA3_MSG_SHUTTER;
1644             notify_msg.message.shutter.frame_number = i->frame_number;
1645             notify_msg.message.shutter.timestamp = capture_time -
1646                     (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
1647             mCallbackOps->notify(mCallbackOps, &notify_msg);
1648             i->timestamp = notify_msg.message.shutter.timestamp;
1649             CDBG("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
1650                     __func__, i->frame_number, notify_msg.message.shutter.timestamp);
1651 
1652             CameraMetadata dummyMetadata;
1653             dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1654                     &i->timestamp, 1);
1655             dummyMetadata.update(ANDROID_REQUEST_ID,
1656                     &(i->request_id), 1);
1657             result.result = dummyMetadata.release();
1658         } else {
1659 
1660             // Send shutter notify to frameworks
1661             notify_msg.type = CAMERA3_MSG_SHUTTER;
1662             notify_msg.message.shutter.frame_number = i->frame_number;
1663             notify_msg.message.shutter.timestamp = capture_time;
1664             mCallbackOps->notify(mCallbackOps, &notify_msg);
1665 
1666             i->timestamp = capture_time;
1667 
1668             result.result = translateFromHalMetadata(metadata,
1669                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
1670                     i->capture_intent);
1671 
1672             if (i->blob_request) {
1673                 {
1674                     //Dump tuning metadata if enabled and available
1675                     char prop[PROPERTY_VALUE_MAX];
1676                     memset(prop, 0, sizeof(prop));
1677                     property_get("persist.camera.dumpmetadata", prop, "0");
1678                     int32_t enabled = atoi(prop);
1679                     if (enabled && metadata->is_tuning_params_valid) {
1680                         dumpMetadataToFile(metadata->tuning_params,
1681                                mMetaFrameCount,
1682                                enabled,
1683                                "Snapshot",
1684                                frame_number);
1685                     }
1686                 }
1687 
1688 
1689                 mPictureChannel->queueReprocMetadata(metadata_buf);
1690             } else {
1691                 // Return metadata buffer
1692                 mMetadataChannel->bufDone(metadata_buf);
1693                 free(metadata_buf);
1694             }
1695         }
1696         if (!result.result) {
1697             ALOGE("%s: metadata is NULL", __func__);
1698         }
1699         result.frame_number = i->frame_number;
1700         result.num_output_buffers = 0;
1701         result.output_buffers = NULL;
1702         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1703                     j != i->buffers.end(); j++) {
1704             if (j->buffer) {
1705                 result.num_output_buffers++;
1706             }
1707         }
1708 
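        // When this request already has filled buffers, mark any flagged as dropped
        // with STATUS_ERROR, remove them from the pending buffers bookkeeping, and
        // return them together with the metadata result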
1709         if (result.num_output_buffers > 0) {
1710             camera3_stream_buffer_t *result_buffers =
1711                 new camera3_stream_buffer_t[result.num_output_buffers];
1712             if (!result_buffers) {
1713                 ALOGE("%s: Fatal error: out of memory", __func__);
1714             }
1715             size_t result_buffers_idx = 0;
1716             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1717                     j != i->buffers.end(); j++) {
1718                 if (j->buffer) {
1719                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1720                             m != mPendingFrameDropList.end(); m++) {
1721                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1722                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1723                         if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
1724                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1725                             CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1726                                   __func__, frame_number, streamID);
1727                             m = mPendingFrameDropList.erase(m);
1728                             break;
1729                         }
1730                     }
1731 
1732                     for (List<PendingBufferInfo>::iterator k =
1733                       mPendingBuffersMap.mPendingBufferList.begin();
1734                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
1735                       if (k->buffer == j->buffer->buffer) {
1736                         CDBG("%s: Found buffer %p in pending buffer List "
1737                               "for frame %d, Take it out!!", __func__,
1738                                k->buffer, k->frame_number);
1739                         mPendingBuffersMap.num_buffers--;
1740                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
1741                         break;
1742                       }
1743                     }
1744 
1745                     result_buffers[result_buffers_idx++] = *(j->buffer);
1746                     free(j->buffer);
1747                     j->buffer = NULL;
1748                 }
1749             }
1750             result.output_buffers = result_buffers;
1751             mCallbackOps->process_capture_result(mCallbackOps, &result);
1752             CDBG("%s: meta frame_number = %d, capture_time = %lld",
1753                     __func__, result.frame_number, i->timestamp);
1754             free_camera_metadata((camera_metadata_t *)result.result);
1755             delete[] result_buffers;
1756         } else {
1757             mCallbackOps->process_capture_result(mCallbackOps, &result);
1758             CDBG("%s: meta frame_number = %d, capture_time = %lld",
1759                         __func__, result.frame_number, i->timestamp);
1760             free_camera_metadata((camera_metadata_t *)result.result);
1761         }
1762         // erase the element from the list
1763         i = mPendingRequestsList.erase(i);
1764 
1765         if (!mPendingReprocessResultList.empty()) {
1766             handlePendingReprocResults(frame_number + 1);
1767         }
1768     }
1769 
1770 done_metadata:
1771     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1772         i != mPendingRequestsList.end() ;i++) {
1773         i->pipeline_depth++;
1774     }
1775     unblockRequestIfNecessary();
1776 
1777 }
1778 
1779 /*===========================================================================
1780  * FUNCTION   : handleBufferWithLock
1781  *
1782  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1783  *
1784  * PARAMETERS : @buffer: image buffer for the callback
1785  *              @frame_number: frame number of the image buffer
1786  *
1787  * RETURN     :
1788  *
1789  *==========================================================================*/
1790 void QCamera3HardwareInterface::handleBufferWithLock(
1791     camera3_stream_buffer_t *buffer, uint32_t frame_number)
1792 {
1793     ATRACE_CALL();
1794     // If the frame number doesn't exist in the pending request list,
1795     // directly send the buffer to the frameworks, and update pending buffers map
1796     // Otherwise, book-keep the buffer.
1797     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1798     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1799         i++;
1800     }
1801     if (i == mPendingRequestsList.end()) {
1802         // Verify that all pending requests' frame numbers are greater
1803         for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1804                 j != mPendingRequestsList.end(); j++) {
1805             if (j->frame_number < frame_number) {
1806                 ALOGE("%s: Error: pending frame number %d is smaller than %d",
1807                         __func__, j->frame_number, frame_number);
1808             }
1809         }
1810         camera3_capture_result_t result;
1811         memset(&result, 0, sizeof(camera3_capture_result_t));
1812         result.result = NULL;
1813         result.frame_number = frame_number;
1814         result.num_output_buffers = 1;
1815         result.partial_result = 0;
1816         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1817                 m != mPendingFrameDropList.end(); m++) {
1818             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1819             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1820             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
1821                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1822                 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1823                         __func__, frame_number, streamID);
1824                 m = mPendingFrameDropList.erase(m);
1825                 break;
1826             }
1827         }
1828         result.output_buffers = buffer;
1829         CDBG("%s: result frame_number = %d, buffer = %p",
1830                 __func__, frame_number, buffer->buffer);
1831 
1832         for (List<PendingBufferInfo>::iterator k =
1833                 mPendingBuffersMap.mPendingBufferList.begin();
1834                 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1835             if (k->buffer == buffer->buffer) {
1836                 CDBG("%s: Found Frame buffer, take it out from list",
1837                         __func__);
1838 
1839                 mPendingBuffersMap.num_buffers--;
1840                 k = mPendingBuffersMap.mPendingBufferList.erase(k);
1841                 break;
1842             }
1843         }
1844         CDBG("%s: mPendingBuffersMap.num_buffers = %d",
1845             __func__, mPendingBuffersMap.num_buffers);
1846 
1847         mCallbackOps->process_capture_result(mCallbackOps, &result);
1848     } else {
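        // The frame number is still pending: for a reprocess request deliver (or
        // cache) the result right away, otherwise hold the buffer until its
        // metadata callback arrives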
1849         if (i->input_buffer) {
1850             CameraMetadata settings;
1851             camera3_notify_msg_t notify_msg;
1852             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
1853             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
1854             if(i->settings) {
1855                 settings = i->settings;
1856                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
1857                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
1858                 } else {
1859                     ALOGE("%s: No timestamp in input settings! Using current one.",
1860                             __func__);
1861                 }
1862             } else {
1863                 ALOGE("%s: Input settings missing!", __func__);
1864             }
1865 
1866             notify_msg.type = CAMERA3_MSG_SHUTTER;
1867             notify_msg.message.shutter.frame_number = frame_number;
1868             notify_msg.message.shutter.timestamp = capture_time;
1869 
1870             sp<Fence> releaseFence = new Fence(i->input_buffer->release_fence);
1871             int32_t rc = releaseFence->wait(Fence::TIMEOUT_NEVER);
1872             if (rc != OK) {
1873                 ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
1874             }
1875 
1876             for (List<PendingBufferInfo>::iterator k =
1877                     mPendingBuffersMap.mPendingBufferList.begin();
1878                     k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1879                 if (k->buffer == buffer->buffer) {
1880                     CDBG("%s: Found Frame buffer, take it out from list",
1881                             __func__);
1882 
1883                     mPendingBuffersMap.num_buffers--;
1884                     k = mPendingBuffersMap.mPendingBufferList.erase(k);
1885                     break;
1886                 }
1887             }
1888             CDBG("%s: mPendingBuffersMap.num_buffers = %d",
1889                 __func__, mPendingBuffersMap.num_buffers);
1890 
1891             bool notifyNow = true;
1892             for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1893                     j != mPendingRequestsList.end(); j++) {
1894                 if (j->frame_number < frame_number) {
1895                     notifyNow = false;
1896                     break;
1897                 }
1898             }
1899 
1900             if (notifyNow) {
1901                 camera3_capture_result result;
1902                 memset(&result, 0, sizeof(camera3_capture_result));
1903                 result.frame_number = frame_number;
1904                 result.result = i->settings;
1905                 result.input_buffer = i->input_buffer;
1906                 result.num_output_buffers = 1;
1907                 result.output_buffers = buffer;
1908                 result.partial_result = PARTIAL_RESULT_COUNT;
1909 
1910                 mCallbackOps->notify(mCallbackOps, &notify_msg);
1911                 mCallbackOps->process_capture_result(mCallbackOps, &result);
1912                 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
1913                 i = mPendingRequestsList.erase(i);
1914                 mPendingRequest--;
1915             } else {
1916                 // Cache reprocess result for later
1917                 PendingReprocessResult pendingResult;
1918                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
1919                 pendingResult.notify_msg = notify_msg;
1920                 pendingResult.buffer = *buffer;
1921                 pendingResult.frame_number = frame_number;
1922                 mPendingReprocessResultList.push_back(pendingResult);
1923                 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
1924             }
1925         } else {
1926             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1927                 j != i->buffers.end(); j++) {
1928                 if (j->stream == buffer->stream) {
1929                     if (j->buffer != NULL) {
1930                         ALOGE("%s: Error: buffer is already set", __func__);
1931                     } else {
1932                         j->buffer = (camera3_stream_buffer_t *)malloc(
1933                             sizeof(camera3_stream_buffer_t));
1934                         *(j->buffer) = *buffer;
1935                         CDBG("%s: cache buffer %p at result frame_number %d",
1936                             __func__, buffer, frame_number);
1937                     }
1938                 }
1939             }
1940         }
1941     }
1942 }
1943 
1944 /*===========================================================================
1945  * FUNCTION   : unblockRequestIfNecessary
1946  *
1947  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1948  *              that mMutex is held when this function is called.
1949  *
1950  * PARAMETERS :
1951  *
1952  * RETURN     :
1953  *
1954  *==========================================================================*/
1955 void QCamera3HardwareInterface::unblockRequestIfNecessary()
1956 {
1957    // Unblock process_capture_request
1958    pthread_cond_signal(&mRequestCond);
1959 }
1960 
1961 /*===========================================================================
1962  * FUNCTION   : processCaptureRequest
1963  *
1964  * DESCRIPTION: process a capture request from camera service
1965  *
1966  * PARAMETERS :
1967  *   @request : request from framework to process
1968  *
1969  * RETURN     :
1970  *
1971  *==========================================================================*/
1972 int QCamera3HardwareInterface::processCaptureRequest(
1973                     camera3_capture_request_t *request)
1974 {
1975     ATRACE_CALL();
1976     int rc = NO_ERROR;
1977     int32_t request_id;
1978     CameraMetadata meta;
1979 
1980     pthread_mutex_lock(&mMutex);
1981 
1982     rc = validateCaptureRequest(request);
1983     if (rc != NO_ERROR) {
1984         ALOGE("%s: incoming request is not valid", __func__);
1985         pthread_mutex_unlock(&mMutex);
1986         return rc;
1987     }
1988 
1989     meta = request->settings;
1990 
1991     // For first capture request, send capture intent, and
1992     // stream on all streams
1993     if (mFirstRequest) {
1994 
1995          /* get eis information for stream configuration */
1996         cam_is_type_t is_type;
1997         char is_type_value[PROPERTY_VALUE_MAX];
1998         property_get("camera.is_type", is_type_value, "0");
1999         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2000 
2001         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2002             int32_t hal_version = CAM_HAL_V3;
2003             uint8_t captureIntent =
2004                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2005             mCaptureIntent = captureIntent;
2006             memset(mParameters, 0, sizeof(parm_buffer_t));
2007             AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2008                 sizeof(hal_version), &hal_version);
2009             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2010                 sizeof(captureIntent), &captureIntent);
2011         }
2012 
2013         //If EIS is enabled, turn it on for video recording;
2014         //EIS is not used for the camera use case, front camcorder and 4K video
2015         bool setEis = mEisEnable && (gCamCapability[mCameraId]->position == CAM_POSITION_BACK &&
2016             (mCaptureIntent ==  CAMERA3_TEMPLATE_VIDEO_RECORD ||
2017              mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
2018         int32_t vsMode;
2019         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
2020         rc = AddSetParmEntryToBatch(mParameters,
2021                 CAM_INTF_PARM_DIS_ENABLE,
2022                 sizeof(vsMode), &vsMode);
2023 
2024         //IS type will be 0 unless EIS is supported. If EIS is supported
2025         //it could either be 1 or 4 depending on the stream and video size
2026         if (setEis){
2027             if (m_bIs4KVideo) {
2028                 is_type = IS_TYPE_DIS;
2029             } else {
2030                 is_type = IS_TYPE_EIS_2_0;
2031             }
2032         }
2033 
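        // Register the first request's buffers with the selected IS type; when EIS
        // is enabled the BLOB (live snapshot) stream is registered with IS_TYPE_DIS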
2034         for (size_t i = 0; i < request->num_output_buffers; i++) {
2035             const camera3_stream_buffer_t& output = request->output_buffers[i];
2036             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2037             /*for the live snapshot (BLOB) stream, is_type will be DIS*/
2038             if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2039                 rc = channel->registerBuffer(output.buffer,
2040                         IS_TYPE_DIS, mCaptureIntent);
2041             } else {
2042                 rc = channel->registerBuffer(output.buffer,
2043                         is_type, mCaptureIntent);
2044             }
2045             if (rc < 0) {
2046                 ALOGE("%s: registerBuffer failed",
2047                         __func__);
2048                 pthread_mutex_unlock(&mMutex);
2049                 return -ENODEV;
2050             }
2051         }
2052 
2053         /*set the capture intent, hal version and dis enable parameters to the backend*/
2054         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2055                     mParameters);
2056 
2057 
2058         //First initialize all streams
2059         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2060             it != mStreamInfo.end(); it++) {
2061             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2062             if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2063                 rc = channel->initialize(IS_TYPE_DIS, mCaptureIntent);
2064             } else {
2065                 rc = channel->initialize(is_type, mCaptureIntent);
2066             }
2067             if (NO_ERROR != rc) {
2068                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
2069                 pthread_mutex_unlock(&mMutex);
2070                 return rc;
2071             }
2072         }
2073 
2074         if (mRawDumpChannel) {
2075             rc = mRawDumpChannel->initialize(is_type, mCaptureIntent);
2076             if (rc != NO_ERROR) {
2077                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
2078                 pthread_mutex_unlock(&mMutex);
2079                 return rc;
2080             }
2081         }
2082         if (mSupportChannel) {
2083             rc = mSupportChannel->initialize(is_type, mCaptureIntent);
2084             if (rc < 0) {
2085                 ALOGE("%s: Support channel initialization failed", __func__);
2086                 pthread_mutex_unlock(&mMutex);
2087                 return rc;
2088             }
2089         }
2090 
2091         //Then start them.
2092         CDBG_HIGH("%s: Start META Channel", __func__);
2093         rc = mMetadataChannel->start();
2094         if (rc < 0) {
2095             ALOGE("%s: META channel start failed", __func__);
2096             pthread_mutex_unlock(&mMutex);
2097             return rc;
2098         }
2099 
2100         if (mSupportChannel) {
2101             rc = mSupportChannel->start();
2102             if (rc < 0) {
2103                 ALOGE("%s: Support channel start failed", __func__);
2104                 mMetadataChannel->stop();
2105                 pthread_mutex_unlock(&mMutex);
2106                 return rc;
2107             }
2108         }
2109         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2110             it != mStreamInfo.end(); it++) {
2111             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2112             CDBG_HIGH("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
2113             rc = channel->start();
2114             if (rc < 0) {
2115                 ALOGE("%s: channel start failed", __func__);
2116                 pthread_mutex_unlock(&mMutex);
2117                 return rc;
2118             }
2119         }
2120 
2121         if (mRawDumpChannel) {
2122             CDBG("%s: Starting raw dump stream",__func__);
2123             rc = mRawDumpChannel->start();
2124             if (rc != NO_ERROR) {
2125                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
2126                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2127                       it != mStreamInfo.end(); it++) {
2128                     QCamera3Channel *channel =
2129                         (QCamera3Channel *)(*it)->stream->priv;
2130                     ALOGE("%s: Stopping Regular Channel mask=%d", __func__,
2131                         channel->getStreamTypeMask());
2132                     channel->stop();
2133                 }
2134                 if (mSupportChannel)
2135                     mSupportChannel->stop();
2136                 mMetadataChannel->stop();
2137                 pthread_mutex_unlock(&mMutex);
2138                 return rc;
2139             }
2140         }
2141         mWokenUpByDaemon = false;
2142         mPendingRequest = 0;
2143     }
2144 
2145     uint32_t frameNumber = request->frame_number;
2146     cam_stream_ID_t streamID;
2147 
2148     if (meta.exists(ANDROID_REQUEST_ID)) {
2149         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
2150         mCurrentRequestId = request_id;
2151         CDBG("%s: Received request with id: %d",__func__, request_id);
2152     } else if (mFirstRequest || mCurrentRequestId == -1){
2153         ALOGE("%s: Unable to find request id field, "
2154                 "& no previous id available", __func__);
         pthread_mutex_unlock(&mMutex);
2155         return NAME_NOT_FOUND;
2156     } else {
2157         CDBG("%s: Re-using old request id", __func__);
2158         request_id = mCurrentRequestId;
2159     }
2160 
2161     CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
2162                                     __func__, __LINE__,
2163                                     request->num_output_buffers,
2164                                     request->input_buffer,
2165                                     frameNumber);
2166     // Acquire all request buffers first
2167     streamID.num_streams = 0;
2168     int blob_request = 0;
2169     uint32_t snapshotStreamId = 0;
2170     for (size_t i = 0; i < request->num_output_buffers; i++) {
2171         const camera3_stream_buffer_t& output = request->output_buffers[i];
2172         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2173         sp<Fence> acquireFence = new Fence(output.acquire_fence);
2174 
2175         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2176             //Call function to store local copy of jpeg data for encode params.
2177             blob_request = 1;
2178             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
2179         }
2180 
2181         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
2182         if (rc != OK) {
2183             ALOGE("%s: fence wait failed %d", __func__, rc);
2184             pthread_mutex_unlock(&mMutex);
2185             return rc;
2186         }
2187 
2188         streamID.streamID[streamID.num_streams] =
2189             channel->getStreamID(channel->getStreamTypeMask());
2190         streamID.num_streams++;
2191 
2192 
2193     }
2194 
2195     if (blob_request && mRawDumpChannel) {
2196         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
2197         streamID.streamID[streamID.num_streams] =
2198             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
2199         streamID.num_streams++;
2200     }
2201 
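    // For a regular request, translate the per-frame settings into HAL parameters;
    // for a reprocess request, just wait on the input buffer's acquire fence here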
2202     if(request->input_buffer == NULL) {
2203        rc = setFrameParameters(request, streamID, snapshotStreamId);
2204         if (rc < 0) {
2205             ALOGE("%s: fail to set frame parameters", __func__);
2206             pthread_mutex_unlock(&mMutex);
2207             return rc;
2208         }
2209     } else {
2210         sp<Fence> acquireFence = new Fence(request->input_buffer->acquire_fence);
2211 
2212         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
2213         if (rc != OK) {
2214             ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
2215             pthread_mutex_unlock(&mMutex);
2216             return rc;
2217         }
2218     }
2219 
2220     /* Update pending request list and pending buffers map */
2221     PendingRequestInfo pendingRequest;
2222     pendingRequest.frame_number = frameNumber;
2223     pendingRequest.num_buffers = request->num_output_buffers;
2224     pendingRequest.request_id = request_id;
2225     pendingRequest.blob_request = blob_request;
2226     pendingRequest.bUrgentReceived = 0;
2227 
2228     pendingRequest.input_buffer = request->input_buffer;
2229     pendingRequest.settings = request->settings;
2230     pendingRequest.pipeline_depth = 0;
2231     pendingRequest.partial_result_cnt = 0;
2232     extractJpegMetadata(pendingRequest.jpegMetadata, request);
2233 
2234     //extract capture intent
2235     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2236         mCaptureIntent =
2237                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2238     }
2239     pendingRequest.capture_intent = mCaptureIntent;
2240 
2241     for (size_t i = 0; i < request->num_output_buffers; i++) {
2242         RequestedBufferInfo requestedBuf;
2243         requestedBuf.stream = request->output_buffers[i].stream;
2244         requestedBuf.buffer = NULL;
2245         pendingRequest.buffers.push_back(requestedBuf);
2246 
2247         // Add the buffer handle to the pending buffers list
2248         PendingBufferInfo bufferInfo;
2249         bufferInfo.frame_number = frameNumber;
2250         bufferInfo.buffer = request->output_buffers[i].buffer;
2251         bufferInfo.stream = request->output_buffers[i].stream;
2252         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
2253         mPendingBuffersMap.num_buffers++;
2254         CDBG("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
2255           __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
2256           bufferInfo.stream->format);
2257     }
2258     CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2259           __func__, mPendingBuffersMap.num_buffers);
2260 
2261     mPendingRequestsList.push_back(pendingRequest);
2262 
2263     if(mFlush) {
2264         pthread_mutex_unlock(&mMutex);
2265         return NO_ERROR;
2266     }
2267 
2268     // Notify the metadata channel that we received a request
2269     mMetadataChannel->request(NULL, frameNumber);
2270 
2271     // Call request on other streams
2272     for (size_t i = 0; i < request->num_output_buffers; i++) {
2273         const camera3_stream_buffer_t& output = request->output_buffers[i];
2274         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2275 
2276         if (channel == NULL) {
2277             ALOGE("%s: invalid channel pointer for stream", __func__);
2278             continue;
2279         }
2280 
2281         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2282             QCamera3RegularChannel* inputChannel = NULL;
2283             if(request->input_buffer != NULL){
2284 
2285                 //Try to get the internal format
2286                 inputChannel = (QCamera3RegularChannel*)
2287                     request->input_buffer->stream->priv;
2288                 if(inputChannel == NULL ){
2289                     ALOGE("%s: failed to get input channel handle", __func__);
2290                     pthread_mutex_unlock(&mMutex);
2291                     return NO_INIT;
2292                 }
2293                 metadata_buffer_t reproc_meta;
2294                 rc = setReprocParameters(request, &reproc_meta, snapshotStreamId);
2295                 if (NO_ERROR == rc) {
2296                     rc = channel->request(output.buffer, frameNumber,
2297                             request->input_buffer, &reproc_meta);
2298                     if (rc < 0) {
2299                         ALOGE("%s: Fail to request on picture channel", __func__);
2300                         pthread_mutex_unlock(&mMutex);
2301                         return rc;
2302                     }
2303                 } else {
2304                     ALOGE("%s: fail to set reproc parameters", __func__);
2305                     pthread_mutex_unlock(&mMutex);
2306                     return rc;
2307                 }
2308             } else
2309                 rc = channel->request(output.buffer, frameNumber,
2310                             NULL, mParameters);
2311         } else {
2312             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
2313                 __LINE__, output.buffer, frameNumber);
2314            rc = channel->request(output.buffer, frameNumber);
2315         }
2316         if (rc < 0)
2317             ALOGE("%s: request failed", __func__);
2318     }
2319 
2320     if(request->input_buffer == NULL) {
2321         /*set the parameters to backend*/
2322         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2323     }
2324 
2325     mFirstRequest = false;
2326     // Use a timed condition wait so the request is not blocked indefinitely
2327     struct timespec ts;
2328     uint8_t isValidTimeout = 1;
2329     rc = clock_gettime(CLOCK_REALTIME, &ts);
2330     if (rc < 0) {
2331       isValidTimeout = 0;
2332       ALOGE("%s: Error reading the real time clock!!", __func__);
2333     }
2334     else {
2335       // Set a 5 sec timeout for the request to be honored
2336       ts.tv_sec += 5;
2337     }
2338     //Block on conditional variable
2339 
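    // Throttle the framework: block while the number of in-flight requests is at
    // MIN_INFLIGHT_REQUESTS or above, until the daemon signals that the count has
    // dropped below MAX_INFLIGHT_REQUESTS or the timed wait expires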
2340     mPendingRequest++;
2341     while (mPendingRequest >= MIN_INFLIGHT_REQUESTS) {
2342         if (!isValidTimeout) {
2343             CDBG("%s: Blocking on conditional wait", __func__);
2344             pthread_cond_wait(&mRequestCond, &mMutex);
2345         }
2346         else {
2347             CDBG("%s: Blocking on timed conditional wait", __func__);
2348             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
2349             if (rc == ETIMEDOUT) {
2350                 rc = -ENODEV;
2351                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
2352                 break;
2353             }
2354         }
2355         CDBG("%s: Unblocked", __func__);
2356         if (mWokenUpByDaemon) {
2357             mWokenUpByDaemon = false;
2358             if (mPendingRequest < MAX_INFLIGHT_REQUESTS)
2359                 break;
2360         }
2361     }
2362     pthread_mutex_unlock(&mMutex);
2363 
2364     return rc;
2365 }
2366 
2367 /*===========================================================================
2368  * FUNCTION   : dump
2369  *
2370  * DESCRIPTION:
2371  *
2372  * PARAMETERS :
2373  *
2374  *
2375  * RETURN     :
2376  *==========================================================================*/
2377 void QCamera3HardwareInterface::dump(int fd)
2378 {
2379     pthread_mutex_lock(&mMutex);
2380     dprintf(fd, "\n Camera HAL3 information Begin \n");
2381 
2382     dprintf(fd, "\nNumber of pending requests: %d \n",
2383         mPendingRequestsList.size());
2384     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
2385     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
2386     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
2387     for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2388         i != mPendingRequestsList.end(); i++) {
2389         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
2390         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
2391         i->input_buffer);
2392     }
2393     dprintf(fd, "\nPending buffer map: Number of buffers: %d\n",
2394                 mPendingBuffersMap.num_buffers);
2395     dprintf(fd, "-------+-------------\n");
2396     dprintf(fd, " Frame | Stream type \n");
2397     dprintf(fd, "-------+-------------\n");
2398     for(List<PendingBufferInfo>::iterator i =
2399         mPendingBuffersMap.mPendingBufferList.begin();
2400         i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
2401         dprintf(fd, " %5d | %11d \n",
2402             i->frame_number, i->stream->stream_type);
2403     }
2404     dprintf(fd, "-------+-------------\n");
2405 
2406     dprintf(fd, "\nPending frame drop list: %d\n",
2407         mPendingFrameDropList.size());
2408     dprintf(fd, "-------+-----------\n");
2409     dprintf(fd, " Frame | Stream ID \n");
2410     dprintf(fd, "-------+-----------\n");
2411     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
2412         i != mPendingFrameDropList.end(); i++) {
2413         dprintf(fd, " %5d | %9d \n",
2414             i->frame_number, i->stream_ID);
2415     }
2416     dprintf(fd, "-------+-----------\n");
2417 
2418     dprintf(fd, "\n Camera HAL3 information End \n");
2419     pthread_mutex_unlock(&mMutex);
2420     return;
2421 }
2422 
2423 /*===========================================================================
2424  * FUNCTION   : flush
2425  *
2426  * DESCRIPTION:
2427  *
2428  * PARAMETERS :
2429  *
2430  *
2431  * RETURN     :
2432  *==========================================================================*/
2433 int QCamera3HardwareInterface::flush()
2434 {
2435     ATRACE_CALL();
2436     unsigned int frameNum = 0;
2437     camera3_notify_msg_t notify_msg;
2438     camera3_capture_result_t result;
2439     camera3_stream_buffer_t *pStream_Buf = NULL;
2440     FlushMap flushMap;
2441 
2442     CDBG("%s: Unblocking Process Capture Request", __func__);
2443     pthread_mutex_lock(&mMutex);
2444     mFlush = true;
2445     pthread_mutex_unlock(&mMutex);
2446 
2447     memset(&result, 0, sizeof(camera3_capture_result_t));
2448 
2449     // Stop the Streams/Channels
2450     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2451         it != mStreamInfo.end(); it++) {
2452         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2453         channel->stop();
2454         (*it)->status = INVALID;
2455     }
2456 
2457     if (mSupportChannel) {
2458         mSupportChannel->stop();
2459     }
2460     if (mRawDumpChannel) {
2461         mRawDumpChannel->stop();
2462     }
2463     if (mMetadataChannel) {
2464         /* If mStreamInfo is not empty, the metadata stream was created as well */
2465         mMetadataChannel->stop();
2466     }
2467 
2468     // Mutex Lock
2469     pthread_mutex_lock(&mMutex);
2470 
2471     // Unblock process_capture_request
2472     mPendingRequest = 0;
2473     pthread_cond_signal(&mRequestCond);
2474 
2475     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2476     if (i != mPendingRequestsList.end())
2477         frameNum = i->frame_number;
2478     CDBG("%s: Oldest frame num on mPendingRequestsList = %d", __func__, frameNum);
2479 
2480     // Go through the pending buffers and group them depending
2481     // on frame number
2482     for (List<PendingBufferInfo>::iterator k =
2483             mPendingBuffersMap.mPendingBufferList.begin();
2484             k != mPendingBuffersMap.mPendingBufferList.end();) {
2485 
2486         if (k->frame_number < frameNum) {
2487             ssize_t idx = flushMap.indexOfKey(k->frame_number);
2488             if (idx == NAME_NOT_FOUND) {
2489                 Vector<PendingBufferInfo> pending;
2490                 pending.add(*k);
2491                 flushMap.add(k->frame_number, pending);
2492             } else {
2493                 Vector<PendingBufferInfo> &pending =
2494                         flushMap.editValueFor(k->frame_number);
2495                 pending.add(*k);
2496             }
2497 
2498             mPendingBuffersMap.num_buffers--;
2499             k = mPendingBuffersMap.mPendingBufferList.erase(k);
2500         } else {
2501             k++;
2502         }
2503     }
2504 
2505     for (size_t i = 0; i < flushMap.size(); i++) {
2506         uint32_t frame_number = flushMap.keyAt(i);
2507         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2508 
2509         // Send an error notify to the framework for each buffer whose
2510         // metadata buffer has already been sent
2511         CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
2512           __func__, frame_number, pending.size());
2513 
2514         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2515         if (NULL == pStream_Buf) {
2516             ALOGE("%s: No memory for pending buffers array", __func__);
2517             pthread_mutex_unlock(&mMutex);
2518             return NO_MEMORY;
2519         }
2520 
2521         for (size_t j = 0; j < pending.size(); j++) {
2522             const PendingBufferInfo &info = pending.itemAt(j);
2523             notify_msg.type = CAMERA3_MSG_ERROR;
2524             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2525             notify_msg.message.error.error_stream = info.stream;
2526             notify_msg.message.error.frame_number = frame_number;
2527             pStream_Buf[j].acquire_fence = -1;
2528             pStream_Buf[j].release_fence = -1;
2529             pStream_Buf[j].buffer = info.buffer;
2530             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2531             pStream_Buf[j].stream = info.stream;
2532             mCallbackOps->notify(mCallbackOps, &notify_msg);
2533             CDBG("%s: notify frame_number = %d stream %p", __func__,
2534                     frame_number, info.stream);
2535         }
2536 
2537         result.result = NULL;
2538         result.frame_number = frame_number;
2539         result.num_output_buffers = pending.size();
2540         result.output_buffers = pStream_Buf;
2541         mCallbackOps->process_capture_result(mCallbackOps, &result);
2542 
2543         delete [] pStream_Buf;
2544     }
2545 
2546     CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
2547 
2548     flushMap.clear();
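    // Regroup all remaining pending buffers by frame number so that one error
    // request (with its buffers) can be reported per frame below.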
2549     for (List<PendingBufferInfo>::iterator k =
2550             mPendingBuffersMap.mPendingBufferList.begin();
2551             k != mPendingBuffersMap.mPendingBufferList.end();) {
2552         ssize_t idx = flushMap.indexOfKey(k->frame_number);
2553         if (idx == NAME_NOT_FOUND) {
2554             Vector<PendingBufferInfo> pending;
2555             pending.add(*k);
2556             flushMap.add(k->frame_number, pending);
2557         } else {
2558             Vector<PendingBufferInfo> &pending =
2559                     flushMap.editValueFor(k->frame_number);
2560             pending.add(*k);
2561         }
2562 
2563         mPendingBuffersMap.num_buffers--;
2564         k = mPendingBuffersMap.mPendingBufferList.erase(k);
2565     }
2566 
2567     // Go through the pending requests info and send error request to framework
2568     for (size_t i = 0; i < flushMap.size(); i++) {
2569         uint32_t frame_number = flushMap.keyAt(i);
2570         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2571         CDBG("%s:Sending ERROR REQUEST for frame %d",
2572               __func__, frame_number);
2573 
2574         // Send an error request notification to the framework
2575         notify_msg.type = CAMERA3_MSG_ERROR;
2576         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
2577         notify_msg.message.error.error_stream = NULL;
2578         notify_msg.message.error.frame_number = frame_number;
2579         mCallbackOps->notify(mCallbackOps, &notify_msg);
2580 
2581         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2582         if (NULL == pStream_Buf) {
2583             ALOGE("%s: No memory for pending buffers array", __func__);
2584             pthread_mutex_unlock(&mMutex);
2585             return NO_MEMORY;
2586         }
2587 
2588         for (size_t j = 0; j < pending.size(); j++) {
2589             const PendingBufferInfo &info = pending.itemAt(j);
2590             pStream_Buf[j].acquire_fence = -1;
2591             pStream_Buf[j].release_fence = -1;
2592             pStream_Buf[j].buffer = info.buffer;
2593             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2594             pStream_Buf[j].stream = info.stream;
2595         }
2596 
2597         result.num_output_buffers = pending.size();
2598         result.output_buffers = pStream_Buf;
2599         result.result = NULL;
2600         result.frame_number = frame_number;
2601         mCallbackOps->process_capture_result(mCallbackOps, &result);
2602         delete [] pStream_Buf;
2603     }
2604 
2605     /* Reset pending buffer list and requests list */
2606     mPendingRequestsList.clear();
2607     /* Reset pending frame Drop list and requests list */
2608     mPendingFrameDropList.clear();
2609 
2610     flushMap.clear();
2611     mPendingBuffersMap.num_buffers = 0;
2612     mPendingBuffersMap.mPendingBufferList.clear();
2613     mPendingReprocessResultList.clear();
2614     CDBG("%s: Cleared all the pending buffers ", __func__);
2615 
2616     mFlush = false;
2617 
2618     // Start the Streams/Channels
2619     int rc = NO_ERROR;
2620     if (mMetadataChannel) {
2621         /* If mStreamInfo is not empty, the metadata stream also needs to be restarted */
2622         rc = mMetadataChannel->start();
2623         if (rc < 0) {
2624             ALOGE("%s: META channel start failed", __func__);
2625             pthread_mutex_unlock(&mMutex);
2626             return rc;
2627         }
2628     }
2629     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2630         it != mStreamInfo.end(); it++) {
2631         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2632         rc = channel->start();
2633         if (rc < 0) {
2634             ALOGE("%s: channel start failed", __func__);
2635             pthread_mutex_unlock(&mMutex);
2636             return rc;
2637         }
2638     }
2639     if (mSupportChannel) {
2640         rc = mSupportChannel->start();
2641         if (rc < 0) {
2642             ALOGE("%s: Support channel start failed", __func__);
2643             pthread_mutex_unlock(&mMutex);
2644             return rc;
2645         }
2646     }
2647     if (mRawDumpChannel) {
2648         rc = mRawDumpChannel->start();
2649         if (rc < 0) {
2650             ALOGE("%s: RAW dump channel start failed", __func__);
2651             pthread_mutex_unlock(&mMutex);
2652             return rc;
2653         }
2654     }
2655 
2656     pthread_mutex_unlock(&mMutex);
2657 
2658     return 0;
2659 }
2660 
2661 /*===========================================================================
2662  * FUNCTION   : captureResultCb
2663  *
2664  * DESCRIPTION: Callback handler for all capture results
2665  *              (stream buffers as well as metadata)
2666  *
2667  * PARAMETERS :
2668  *   @metadata_buf : metadata buffer from the backend; NULL for buffer callbacks
2669  *   @buffer       : gralloc stream buffer to be returned to the framework;
2670  *                   NULL for metadata callbacks
2671  *   @frame_number : frame number the returned buffer belongs to
2672  * RETURN     : NONE
2673  *==========================================================================*/
2674 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
2675                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
2676 {
2677     pthread_mutex_lock(&mMutex);
2678 
2679     /* Assume flush() is called before any reprocessing. Send
2680      * notify and result immediately upon receipt of any callback*/
2681     if (mLoopBackResult) {
2682         /* Send notify */
2683         camera3_notify_msg_t notify_msg;
2684         notify_msg.type = CAMERA3_MSG_SHUTTER;
2685         notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
2686         notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
2687         mCallbackOps->notify(mCallbackOps, &notify_msg);
2688 
2689         /* Send capture result */
2690         mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
2691         free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
2692         free(mLoopBackResult);
2693         mLoopBackResult = NULL;
2694     }
2695 
2696     if (metadata_buf)
2697         handleMetadataWithLock(metadata_buf);
2698     else
2699         handleBufferWithLock(buffer, frame_number);
2700     pthread_mutex_unlock(&mMutex);
2701     return;
2702 }
2703 
2704 /*===========================================================================
2705  * FUNCTION   : translateFromHalMetadata
2706  *
2707  * DESCRIPTION: Translate metadata reported by the HAL/backend into the
2708  *              camera_metadata_t format expected by the camera framework
2709  * PARAMETERS :
2710  *   @metadata : metadata information from callback
2711  *   @timestamp: metadata buffer timestamp
2712  *   @request_id: request id
2713  *   @jpegMetadata: additional jpeg metadata
2714  *   @pipeline_depth, @capture_intent : pipeline depth and capture intent for this request
2715  * RETURN     : camera_metadata_t*
2716  *              metadata in a format specified by fwk
2717  *==========================================================================*/
2718 camera_metadata_t*
2719 QCamera3HardwareInterface::translateFromHalMetadata(
2720                                  metadata_buffer_t *metadata,
2721                                  nsecs_t timestamp,
2722                                  int32_t request_id,
2723                                  const CameraMetadata& jpegMetadata,
2724                                  uint8_t pipeline_depth,
2725                                  uint8_t capture_intent)
2726 {
2727     CameraMetadata camMetadata;
2728     camera_metadata_t* resultMetadata;
2729 
2730     if (jpegMetadata.entryCount())
2731         camMetadata.append(jpegMetadata);
2732 
2733     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
2734     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
2735     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
2736     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
2737 
2738     if (IS_META_AVAILABLE(CAM_INTF_META_FRAME_NUMBER, metadata)) {
2739         int64_t frame_number = *(uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2740         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
2741     }
2742 
2743 
2744     if (IS_META_AVAILABLE(CAM_INTF_PARM_FPS_RANGE, metadata)) {
2745         int32_t fps_range[2];
2746         cam_fps_range_t * float_range =
2747           (cam_fps_range_t *)POINTER_OF_PARAM(CAM_INTF_PARM_FPS_RANGE, metadata);
2748         fps_range[0] = (int32_t)float_range->min_fps;
2749         fps_range[1] = (int32_t)float_range->max_fps;
2750         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
2751                                       fps_range, 2);
2752         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
2753             __func__, fps_range[0], fps_range[1]);
2754     }
2755 
2756 
2757     if (IS_META_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata)) {
2758         int32_t  *expCompensation =
2759           (int32_t *)POINTER_OF_META(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
2760         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2761                                       expCompensation, 1);
2762     }
2763 
2764     if (IS_META_AVAILABLE(CAM_INTF_PARM_BESTSHOT_MODE, metadata)) {
2765         uint8_t sceneMode =
2766                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_BESTSHOT_MODE, metadata));
2767         uint8_t fwkSceneMode =
2768             (uint8_t)lookupFwkName(SCENE_MODES_MAP,
2769             sizeof(SCENE_MODES_MAP)/
2770             sizeof(SCENE_MODES_MAP[0]), sceneMode);
2771         camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
2772              &fwkSceneMode, 1);
2773     }
2774 
2775     if (IS_META_AVAILABLE(CAM_INTF_PARM_AEC_LOCK, metadata)) {
2776         uint8_t  ae_lock =
2777                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AEC_LOCK, metadata));
2778         camMetadata.update(ANDROID_CONTROL_AE_LOCK,
2779                 &ae_lock, 1);
2780     }
2781 
2782     if (IS_META_AVAILABLE(CAM_INTF_PARM_AWB_LOCK, metadata)) {
2783         uint8_t awb_lock =
2784                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AWB_LOCK, metadata));
2785         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
2786     }
2787 
2788     if (IS_META_AVAILABLE(CAM_INTF_META_FACE_DETECTION, metadata)){
2789         cam_face_detection_data_t *faceDetectionInfo =
2790             (cam_face_detection_data_t *)POINTER_OF_META(CAM_INTF_META_FACE_DETECTION, metadata);
2791         uint8_t numFaces = faceDetectionInfo->num_faces_detected;
2792         int32_t faceIds[MAX_ROI];
2793         uint8_t faceScores[MAX_ROI];
2794         int32_t faceRectangles[MAX_ROI * 4];
2795         int32_t faceLandmarks[MAX_ROI * 6];
2796         int j = 0, k = 0;
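        // j advances 4 entries per face in faceRectangles (one rectangle),
        // k advances 6 entries per face in faceLandmarks (eye and mouth
        // landmark coordinates).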
2797         for (int i = 0; i < numFaces; i++) {
2798             faceIds[i] = faceDetectionInfo->faces[i].face_id;
2799             faceScores[i] = faceDetectionInfo->faces[i].score;
2800             convertToRegions(faceDetectionInfo->faces[i].face_boundary,
2801                 faceRectangles+j, -1);
2802             convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
2803             j+= 4;
2804             k+= 6;
2805         }
2806         if (numFaces <= 0) {
2807             memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
2808             memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
2809             memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
2810             memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
2811         }
2812         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
2813         camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
2814         camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
2815             faceRectangles, numFaces*4);
2816         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
2817             faceLandmarks, numFaces*6);
2818     }
2819     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_MODE, metadata)){
2820         uint8_t  *color_correct_mode =
2821             (uint8_t *)POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2822         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2823     }
2824     if (IS_META_AVAILABLE(CAM_INTF_META_EDGE_MODE, metadata)) {
2825         cam_edge_application_t  *edgeApplication =
2826             (cam_edge_application_t *)POINTER_OF_META(CAM_INTF_META_EDGE_MODE, metadata);
2827         uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2828         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2829         camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2830     }
2831     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_POWER, metadata)) {
2832         uint8_t  *flashPower =
2833             (uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_POWER, metadata);
2834         camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2835     }
2836     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_FIRING_TIME, metadata)) {
2837         int64_t  *flashFiringTime =
2838             (int64_t *)POINTER_OF_META(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2839         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2840     }
2841     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_STATE, metadata)) {
2842         uint8_t  flashState =
2843             *((uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_STATE, metadata));
2844         if (!gCamCapability[mCameraId]->flash_available) {
2845             flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2846         }
2847         camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2848     }
2849     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_MODE, metadata)){
2850         uint8_t flashMode = *((uint8_t*)
2851             POINTER_OF_META(CAM_INTF_META_FLASH_MODE, metadata));
2852         uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2853             sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), flashMode);
2854         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2855     }
2856     if (IS_META_AVAILABLE(CAM_INTF_META_HOTPIXEL_MODE, metadata)) {
2857         uint8_t  *hotPixelMode =
2858             (uint8_t *)POINTER_OF_META(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2859         camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2860     }
2861     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_APERTURE, metadata)){
2862         float  *lensAperture =
2863             (float *)POINTER_OF_META(CAM_INTF_META_LENS_APERTURE, metadata);
2864         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2865     }
2866     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FILTERDENSITY, metadata)) {
2867         float  *filterDensity =
2868             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2869         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2870     }
2871     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)){
2872         float  *focalLength =
2873             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2874         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2875     }
2876 
2877     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata)) {
2878         uint8_t  *opticalStab =
2879             (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2880         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2881     }
2882     if (IS_META_AVAILABLE(CAM_INTF_PARM_DIS_ENABLE, metadata)) {
2883         uint8_t *vsMode =
2884             (uint8_t *)POINTER_OF_META(CAM_INTF_PARM_DIS_ENABLE, metadata);
2885         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, vsMode, 1);
2886     }
2887 
2888     if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
2889         uint8_t  *noiseRedMode =
2890             (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2891         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2892     }
2893     if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata)) {
2894         uint8_t  *noiseRedStrength =
2895             (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2896         camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2897     }
2898     if (IS_META_AVAILABLE(CAM_INTF_META_SCALER_CROP_REGION, metadata)) {
2899         cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2900             POINTER_OF_META(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2901         int32_t scalerCropRegion[4];
2902         scalerCropRegion[0] = hScalerCropRegion->left;
2903         scalerCropRegion[1] = hScalerCropRegion->top;
2904         scalerCropRegion[2] = hScalerCropRegion->width;
2905         scalerCropRegion[3] = hScalerCropRegion->height;
2906         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2907     }
2908     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)){
2909         int64_t  *sensorExpTime =
2910             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2911         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2912         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2913     }
2914     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata)){
2915         int64_t  *sensorFrameDuration =
2916             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2917         CDBG("%s: sensorFrameDuration = %lld", __func__, *sensorFrameDuration);
2918         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
2919     }
2920     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata)){
2921         int64_t  *sensorRollingShutterSkew =
2922             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
2923                 metadata);
2924         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
2925         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2926                 sensorRollingShutterSkew, 1);
2927     }
2928 
2929     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)){
2930         int32_t sensorSensitivity =
2931             *((int32_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
2932         CDBG("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
2933         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
2934 
2935         //calculate the noise profile based on sensitivity
2936         double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
2937         double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
2938         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
2939         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i +=2) {
2940            noise_profile[i]   = noise_profile_S;
2941            noise_profile[i+1] = noise_profile_O;
2942         }
2943         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
2944              noise_profile_S, noise_profile_O);
2945         camMetadata.update( ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
2946                             2 * gCamCapability[mCameraId]->num_color_channels);
2947     }
2948 
2949 
2950     if (IS_META_AVAILABLE(CAM_INTF_META_SHADING_MODE, metadata)) {
2951         uint8_t  *shadingMode =
2952             (uint8_t *)POINTER_OF_META(CAM_INTF_META_SHADING_MODE, metadata);
2953         camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2954     }
2955     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata)) {
2956         uint8_t  *faceDetectMode =
2957             (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2958         uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2959             sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), *faceDetectMode);
2960         camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2961     }
2962     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata)) {
2963         uint8_t  *histogramMode =
2964             (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2965          camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2966     }
2967     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata)){
2968        uint8_t  *sharpnessMapMode =
2969           (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2970        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2971                           sharpnessMapMode, 1);
2972     }
2973     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata)){
2974        cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2975        POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2976        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2977                           (int32_t*)sharpnessMap->sharpness,
2978                           CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2979     }
2980     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP, metadata)) {
2981        cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2982        POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2983        int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2984        int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2985        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2986                           (float*)lensShadingMap->lens_shading,
2987                           4*map_width*map_height);
2988     }
2989     if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_MODE, metadata)) {
2990         uint8_t  *toneMapMode =
2991             (uint8_t *)POINTER_OF_META(CAM_INTF_META_TONEMAP_MODE, metadata);
2992         camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2993     }
2994     if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_CURVES, metadata)){
2995         //Populate CAM_INTF_META_TONEMAP_CURVES
2996         /* ch0 = G, ch 1 = B, ch 2 = R*/
2997         cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2998                 POINTER_OF_META(CAM_INTF_META_TONEMAP_CURVES, metadata);
2999         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3000             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3001                     __func__, tonemap->tonemap_points_cnt,
3002                     CAM_MAX_TONEMAP_CURVE_SIZE);
3003             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3004         }
3005 
3006         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
3007                         (float*)tonemap->curves[0].tonemap_points,
3008                         tonemap->tonemap_points_cnt * 2);
3009 
3010         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
3011                         (float*)tonemap->curves[1].tonemap_points,
3012                         tonemap->tonemap_points_cnt * 2);
3013 
3014         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
3015                         (float*)tonemap->curves[2].tonemap_points,
3016                         tonemap->tonemap_points_cnt * 2);
3017     }
3018     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata)){
3019         cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
3020             POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
3021         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
3022     }
3023     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata)){
3024         cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
3025         POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
3026         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
3027             (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
3028     }
3029     if (IS_META_AVAILABLE(CAM_INTF_META_PROFILE_TONE_CURVE, metadata)) {
3030         cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
3031                 POINTER_OF_META(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
3032         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3033             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3034                     __func__, toneCurve->tonemap_points_cnt,
3035                     CAM_MAX_TONEMAP_CURVE_SIZE);
3036             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3037         }
3038         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
3039                 (float*)toneCurve->curve.tonemap_points,
3040                 toneCurve->tonemap_points_cnt * 2);
3041     }
3042     if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata)){
3043         cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
3044             POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
3045         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
3046             predColorCorrectionGains->gains, 4);
3047     }
3048     if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata)){
3049         cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
3050             POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
3051         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3052             (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
3053     }
3054     if (IS_META_AVAILABLE(CAM_INTF_META_OTP_WB_GRGB, metadata)) {
3055         float *otpWbGrGb = (float*) POINTER_OF_META(
3056                 CAM_INTF_META_OTP_WB_GRGB, metadata);
3057         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
3058     }
3059     if (IS_META_AVAILABLE(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata)){
3060         uint8_t *blackLevelLock = (uint8_t*)
3061             POINTER_OF_META(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
3062         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
3063     }
3064     if (IS_META_AVAILABLE(CAM_INTF_META_SCENE_FLICKER, metadata)){
3065         uint8_t *sceneFlicker = (uint8_t*)
3066             POINTER_OF_META(CAM_INTF_META_SCENE_FLICKER, metadata);
3067         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
3068     }
3069     if (IS_META_AVAILABLE(CAM_INTF_PARM_EFFECT, metadata)) {
3070         uint8_t *effectMode = (uint8_t*)
3071             POINTER_OF_META(CAM_INTF_PARM_EFFECT, metadata);
3072         uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
3073                                             sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3074                                             *effectMode);
3075         camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
3076     }
3077     if (IS_META_AVAILABLE(CAM_INTF_META_TEST_PATTERN_DATA, metadata)) {
3078         cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
3079             POINTER_OF_META(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
3080         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
3081                 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3082                 testPatternData->mode);
3083         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
3084                 &fwk_testPatternMode, 1);
3085         int32_t fwk_testPatternData[4];
3086         fwk_testPatternData[0] = testPatternData->r;
3087         fwk_testPatternData[3] = testPatternData->b;
3088         switch (gCamCapability[mCameraId]->color_arrangement) {
3089         case CAM_FILTER_ARRANGEMENT_RGGB:
3090         case CAM_FILTER_ARRANGEMENT_GRBG:
3091             fwk_testPatternData[1] = testPatternData->gr;
3092             fwk_testPatternData[2] = testPatternData->gb;
3093             break;
3094         case CAM_FILTER_ARRANGEMENT_GBRG:
3095         case CAM_FILTER_ARRANGEMENT_BGGR:
3096             fwk_testPatternData[2] = testPatternData->gr;
3097             fwk_testPatternData[1] = testPatternData->gb;
3098             break;
3099         default:
3100             ALOGE("%s: color arrangement %d is not supported", __func__,
3101                 gCamCapability[mCameraId]->color_arrangement);
3102             break;
3103         }
3104         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
3105     }
3106     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
3107         double *gps_coords = (double *)POINTER_OF_META(
3108             CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
3109         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
3110     }
3111     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
3112         char *gps_methods = (char *)POINTER_OF_META(
3113             CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
3114         String8 str(gps_methods);
3115         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
3116     }
3117     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
3118         int64_t *gps_timestamp = (int64_t *)POINTER_OF_META(
3119                 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
3120         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
3121     }
3122     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
3123         int32_t *jpeg_orientation = (int32_t *)POINTER_OF_META(
3124                 CAM_INTF_META_JPEG_ORIENTATION, metadata);
3125         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
3126     }
3127     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
3128         uint8_t *jpeg_quality = (uint8_t *)POINTER_OF_META(
3129                 CAM_INTF_META_JPEG_QUALITY, metadata);
3130         camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
3131     }
3132     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
3133         uint8_t *thumb_quality = (uint8_t *)POINTER_OF_META(
3134                 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
3135         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
3136     }
3137     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
3138         cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF_META(
3139                 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
        // Propagate the thumbnail size to the framework (it was previously
        // fetched here but never reported).
        int32_t thumbnail_size[2] = {thumb_size->width, thumb_size->height};
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size, 2);
3140     }
3141     if (IS_META_AVAILABLE(CAM_INTF_META_PRIVATE_DATA, metadata)) {
3142         int32_t *privateData = (int32_t *)
3143                 POINTER_OF_META(CAM_INTF_META_PRIVATE_DATA, metadata);
3144         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
3145                 privateData, MAX_METADATA_PRIVATE_PAYLOAD_SIZE);
3146     }
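    // The tuning blob packed below is a flat byte array: six uint32_t fields
    // (data version and the sensor/VFE/CPP/CAC/mod3 payload sizes) followed by
    // the sensor, VFE, CPP and CAC payloads copied from their fixed offsets in
    // tuning_params.data.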
3147     if (metadata->is_tuning_params_valid) {
3148         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
3149         uint8_t *data = (uint8_t*)&tuning_meta_data_blob[0];
3150         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
3151 
3152 
3153         memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_data_version),
3154                 sizeof(uint32_t));
3155         data += sizeof(uint32_t);
3156 
3157         memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_sensor_data_size),
3158                 sizeof(uint32_t));
3159         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
3160         data += sizeof(uint32_t);
3161 
3162         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
3163                 sizeof(uint32_t));
3164         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
3165         data += sizeof(uint32_t);
3166 
3167         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
3168                 sizeof(uint32_t));
3169         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
3170         data += sizeof(uint32_t);
3171 
3172         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
3173                 sizeof(uint32_t));
3174         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
3175         data += sizeof(uint32_t);
3176 
3177         metadata->tuning_params.tuning_mod3_data_size = 0;
3178         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
3179                 sizeof(uint32_t));
3180         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
3181         data += sizeof(uint32_t);
3182 
3183         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
3184                 metadata->tuning_params.tuning_sensor_data_size);
3185         data += metadata->tuning_params.tuning_sensor_data_size;
3186 
3187         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
3188                 metadata->tuning_params.tuning_vfe_data_size);
3189         data += metadata->tuning_params.tuning_vfe_data_size;
3190 
3191         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
3192                 metadata->tuning_params.tuning_cpp_data_size);
3193         data += metadata->tuning_params.tuning_cpp_data_size;
3194 
3195 
3196         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
3197                 metadata->tuning_params.tuning_cac_data_size);
3198         data += metadata->tuning_params.tuning_cac_data_size;
3199 
3200         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
3201             (int32_t*)tuning_meta_data_blob, (data-tuning_meta_data_blob)/sizeof(uint32_t));
3202     }
3203     if (IS_META_AVAILABLE(CAM_INTF_META_NEUTRAL_COL_POINT, metadata)) {
3204         cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
3205                 POINTER_OF_META(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
3206         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3207                 (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
3208     }
3209 
3210     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata)) {
3211          uint8_t  shadingMapMode =
3212                  *((uint32_t *)POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata));
3213          camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingMapMode, 1);
3214     }
3215 
3216     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_ROI, metadata)) {
3217         cam_area_t  *hAeRegions =
3218                 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AEC_ROI, metadata);
3219         int32_t aeRegions[5];
3220         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
3221         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
3222         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
3223                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
3224                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
3225                 hAeRegions->rect.height);
3226     }
3227 
3228     if (IS_META_AVAILABLE(CAM_INTF_META_AF_ROI, metadata)) {
3229         /*af regions*/
3230         cam_area_t  *hAfRegions =
3231                 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AF_ROI, metadata);
3232         int32_t afRegions[5];
3233         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
3234         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
3235         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
3236                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
3237                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
3238                 hAfRegions->rect.height);
3239     }
3240 
3241     if (IS_META_AVAILABLE(CAM_INTF_PARM_ANTIBANDING, metadata)) {
3242         uint8_t hal_ab_mode =
3243                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_ANTIBANDING, metadata));
3244         uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
3245                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3246                 hal_ab_mode);
3247         camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
3248                 &fwk_ab_mode, 1);
3249     }
3250 
3251     if (IS_META_AVAILABLE(CAM_INTF_META_MODE, metadata)) {
3252          uint8_t mode =
3253                  *((uint32_t *)POINTER_OF_META(CAM_INTF_META_MODE, metadata));
3254          camMetadata.update(ANDROID_CONTROL_MODE, &mode, 1);
3255     }
3256 
3257     /* Constant metadata values to be updated */
3258     uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
3259     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
3260 
3261     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
3262     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
3263 
3264     int32_t hotPixelMap[2];
3265     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
3266 
3267     uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3268     camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
3269 
3270     // CDS
3271     if (IS_META_AVAILABLE(CAM_INTF_PARM_CDS_MODE, metadata)) {
3272         cam_cds_mode_type_t *cds = (cam_cds_mode_type_t *)
3273                 POINTER_OF_META(CAM_INTF_PARM_CDS_MODE, metadata);
3274         int32_t mode = *cds;
3275         camMetadata.update(QCAMERA3_CDS_MODE,
3276                 &mode, 1);
3277     }
3278 
3279     // Reprocess crop data
3280     if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, metadata)) {
3281         cam_crop_data_t *crop_data = (cam_crop_data_t *)
3282                 POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, metadata);
3283         uint8_t cnt = crop_data->num_of_streams;
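        // For every HAL crop entry that matches a configured stream, four
        // int32_t values [left, top, width, height] are appended to crop[] and
        // the stream pointer (cast to int32_t) is stored at the same index in
        // crop_stream_ids[].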
3284         if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) {
3285             int rc = NO_ERROR;
3286             int32_t *crop = new int32_t[cnt*4];
3287             if (NULL == crop) {
3288                 rc = NO_MEMORY;
3289             }
3290 
3291             int32_t *crop_stream_ids = new int32_t[cnt];
3292             if (NULL == crop_stream_ids) {
3293                 rc = NO_MEMORY;
3294             }
3295 
3296             if (NO_ERROR == rc) {
3297                 int32_t steams_found = 0;
3298                 for (size_t i = 0; i < cnt; i++) {
3299                     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3300                         it != mStreamInfo.end(); it++) {
3301                         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3302                         if (NULL != channel) {
3303                             if (crop_data->crop_info[i].stream_id ==
3304                                     channel->mStreams[0]->getMyServerID()) {
3305                                 crop[steams_found*4] = crop_data->crop_info[i].crop.left;
3306                                 crop[steams_found*4 + 1] = crop_data->crop_info[i].crop.top;
3307                                 crop[steams_found*4 + 2] = crop_data->crop_info[i].crop.width;
3308                                 crop[steams_found*4 + 3] = crop_data->crop_info[i].crop.height;
3309                                 // In a more general case we may want to generate
3310                                 // unique id depending on width, height, stream, private
3311                                 // data etc.
3312                                 crop_stream_ids[steams_found] = (int32_t)(*it)->stream;
3313                                 steams_found++;
3314                                 CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d",
3315                                         __func__,
3316                                         (*it)->stream,
3317                                         crop_data->crop_info[i].crop.left,
3318                                         crop_data->crop_info[i].crop.top,
3319                                         crop_data->crop_info[i].crop.width,
3320                                         crop_data->crop_info[i].crop.height);
3321                                 break;
3322                             }
3323                         }
3324                     }
3325                 }
3326 
3327                 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
3328                         &steams_found, 1);
3329                 camMetadata.update(QCAMERA3_CROP_REPROCESS,
3330                         crop, steams_found*4);
3331                 camMetadata.update(QCAMERA3_CROP_STREAM_ID_REPROCESS,
3332                         crop_stream_ids, steams_found);
3333             }
3334 
3335             if (crop) {
3336                 delete [] crop;
3337             }
3338             if (crop_stream_ids) {
3339                 delete [] crop_stream_ids;
3340             }
3341         } else {
3342             // mm-qcamera-daemon only posts crop_data for streams
3343             // not linked to pproc, so the absence of valid crop
3344             // metadata is not necessarily an error.
3345             CDBG("%s: No valid crop metadata entries", __func__);
3346         }
3347     }
3348 
3349     if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_CAC, metadata)) {
3350         cam_aberration_mode_t  *cacMode = (cam_aberration_mode_t *)
3351                 POINTER_OF_PARAM(CAM_INTF_PARM_CAC, metadata);
3352         int32_t cac = lookupFwkName(COLOR_ABERRATION_MAP,
3353                 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
3354                 *cacMode);
3355         if (NAME_NOT_FOUND != cac) {
3356             uint8_t val = (uint8_t) cac;
3357             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
3358                     &val,
3359                     1);
3360         } else {
3361             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
3362         }
3363     }
3364 
3365     resultMetadata = camMetadata.release();
3366     return resultMetadata;
3367 }
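
/*
 * Illustrative sketch (not part of the HAL source): how the metadata returned
 * by translateFromHalMetadata() is typically attached to a capture result and
 * released, mirroring the pattern used for mLoopBackResult in captureResultCb.
 * Variable names here are hypothetical.
 *
 *   camera_metadata_t *resultMeta =
 *           translateFromHalMetadata(metadata, timestamp, request_id,
 *                   jpegMetadata, pipeline_depth, capture_intent);
 *   camera3_capture_result_t result = {};
 *   result.frame_number = frame_number;
 *   result.result = resultMeta;
 *   mCallbackOps->process_capture_result(mCallbackOps, &result);
 *   free_camera_metadata(resultMeta);   // HAL frees it after the framework copies it
 */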
3368 
3369 /*===========================================================================
3370  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
3371  *
3372  * DESCRIPTION: Translate the partial (urgent) 3A metadata reported by the
3373  *              HAL/backend into the camera_metadata_t format expected by fwk
3374  * PARAMETERS :
3375  *   @metadata : metadata information from callback
3376  *
3377  * RETURN     : camera_metadata_t*
3378  *              metadata in a format specified by fwk
3379  *==========================================================================*/
3380 camera_metadata_t*
3381 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
3382                                 (metadata_buffer_t *metadata)
3383 {
3384     CameraMetadata camMetadata;
3385     camera_metadata_t* resultMetadata;
3386     uint8_t aeMode = CAM_AE_MODE_MAX;
3387     int32_t *flashMode = NULL;
3388     int32_t *redeye = NULL;
3389 
3390     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_STATE, metadata)) {
3391         uint8_t *ae_state = (uint8_t *)
3392             POINTER_OF_META(CAM_INTF_META_AEC_STATE, metadata);
3393         camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
3394         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
3395     }
3396 
3397     if (IS_META_AVAILABLE(CAM_INTF_META_AF_STATE, metadata)) {
3398         uint8_t  *afState = (uint8_t *)
3399             POINTER_OF_META(CAM_INTF_META_AF_STATE, metadata);
3400         camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
3401         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %d", __func__, *afState);
3402     }
3403 
3404     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata)) {
3405         float  *focusDistance =
3406             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
3407         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
3408     }
3409 
3410     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_RANGE, metadata)) {
3411         float  *focusRange =
3412             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
3413         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
3414     }
3415 
3416     if (IS_META_AVAILABLE(CAM_INTF_META_AWB_STATE, metadata)) {
3417         uint8_t  *whiteBalanceState = (uint8_t *)
3418             POINTER_OF_META(CAM_INTF_META_AWB_STATE, metadata);
3419         camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
3420         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
3421     }
3422 
3423     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata)) {
3424         cam_trigger_t *aecTrigger =
3425                 (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata);
3426         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
3427                 &aecTrigger->trigger, 1);
3428         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
3429                 &aecTrigger->trigger_id, 1);
3430     }
3431 
3432     if (IS_META_AVAILABLE(CAM_INTF_PARM_FOCUS_MODE, metadata)) {
3433         uint8_t  *focusMode = (uint8_t *)
3434             POINTER_OF_META(CAM_INTF_PARM_FOCUS_MODE, metadata);
3435         uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3436             sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
3437         camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
3438     }
3439 
3440     if (IS_META_AVAILABLE(CAM_INTF_META_AF_TRIGGER, metadata)) {
3441         cam_trigger_t *af_trigger =
3442                 (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AF_TRIGGER, metadata);
3443         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
3444                 &af_trigger->trigger, 1);
3445         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
3446     }
3447 
3448     if (IS_META_AVAILABLE(CAM_INTF_PARM_WHITE_BALANCE, metadata)) {
3449         uint8_t  *whiteBalance = (uint8_t *)
3450             POINTER_OF_META(CAM_INTF_PARM_WHITE_BALANCE, metadata);
3451         uint8_t fwkWhiteBalanceMode =
3452             (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3453                 sizeof(WHITE_BALANCE_MODES_MAP)/
3454                 sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
3455         camMetadata.update(ANDROID_CONTROL_AWB_MODE,
3456             &fwkWhiteBalanceMode, 1);
3457     }
3458 
3459     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_MODE, metadata)) {
3460         aeMode = *((uint32_t*) POINTER_OF_META(CAM_INTF_META_AEC_MODE, metadata));
3461     }
3462     if (IS_META_AVAILABLE(CAM_INTF_PARM_LED_MODE, metadata)) {
3463         flashMode = (int32_t*)
3464                 POINTER_OF_PARAM(CAM_INTF_PARM_LED_MODE, metadata);
3465     }
3466     if (IS_META_AVAILABLE(CAM_INTF_PARM_REDEYE_REDUCTION, metadata)) {
3467         redeye = (int32_t*)
3468                 POINTER_OF_PARAM(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
3469     }
3470 
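    /* Deduce ANDROID_CONTROL_AE_MODE from the HAL hints, in priority order:
     * red-eye reduction enabled -> ON_AUTO_FLASH_REDEYE; otherwise an auto/on
     * flash mode -> the mapped flash AE mode; otherwise fall back to the plain
     * AE on/off state. */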
3471     uint8_t fwk_aeMode;
3472     if (redeye != NULL && *redeye == 1) {
3473         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3474         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3475     } else if (flashMode != NULL &&
3476             ((*flashMode == CAM_FLASH_MODE_AUTO)||
3477              (*flashMode == CAM_FLASH_MODE_ON))) {
3478         fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
3479                 sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
3480         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3481     } else if (aeMode == CAM_AE_MODE_ON) {
3482         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
3483         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3484     } else if (aeMode == CAM_AE_MODE_OFF) {
3485         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
3486         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3487     } else {
3488         ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%d!!!",__func__,
3489               redeye, flashMode, aeMode);
3490     }
3491 
3492     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_STATE, metadata)) {
3493         uint8_t *lensState = (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_STATE, metadata);
3494         camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
3495     }
3496 
3497     resultMetadata = camMetadata.release();
3498     return resultMetadata;
3499 }
3500 
3501 /*===========================================================================
3502  * FUNCTION   : dumpMetadataToFile
3503  *
3504  * DESCRIPTION: Dumps tuning metadata to file system
3505  *
3506  * PARAMETERS :
3507  *   @meta           : tuning metadata
3508  *   @dumpFrameCount : current dump frame count
3509  *   @enabled        : Enable mask
3510  *   @type, @frameNumber : type tag and frame number used in the dump file name
3511  *==========================================================================*/
3512 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
3513                                                    uint32_t &dumpFrameCount,
3514                                                    int32_t enabled,
3515                                                    const char *type,
3516                                                    uint32_t frameNumber)
3517 {
3518     uint32_t frm_num = 0;
3519 
3520     //Some sanity checks
3521     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
3522         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
3523               __func__,
3524               meta.tuning_sensor_data_size,
3525               TUNING_SENSOR_DATA_MAX);
3526         return;
3527     }
3528 
3529     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
3530         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
3531               __func__,
3532               meta.tuning_vfe_data_size,
3533               TUNING_VFE_DATA_MAX);
3534         return;
3535     }
3536 
3537     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
3538         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
3539               __func__,
3540               meta.tuning_cpp_data_size,
3541               TUNING_CPP_DATA_MAX);
3542         return;
3543     }
3544 
3545     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
3546         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
3547               __func__,
3548               meta.tuning_cac_data_size,
3549               TUNING_CAC_DATA_MAX);
3550         return;
3551     }
3552     //
3553 
3554     if (enabled) {
3555         char timeBuf[FILENAME_MAX];
3556         char buf[FILENAME_MAX];
3557         memset(buf, 0, sizeof(buf));
3558         memset(timeBuf, 0, sizeof(timeBuf));
3559         time_t current_time;
3560         struct tm * timeinfo;
3561         time (&current_time);
3562         timeinfo = localtime (&current_time);
3563         strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
3564         String8 filePath(timeBuf);
3565         snprintf(buf,
3566                 sizeof(buf),
3567                 "%dm_%s_%d.bin",
3568                 dumpFrameCount,
3569                 type,
3570                 frameNumber);
3571         filePath.append(buf);
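        // Note: the timestamp prefix and the file name are concatenated with no
        // separator; with illustrative values the result is e.g.
        // "/data/20150101093000" + "3m_meta_42.bin" -> "/data/201501010930003m_meta_42.bin".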
3572         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
3573         if (file_fd >= 0) {
3574             int written_len = 0;
3575             meta.tuning_data_version = TUNING_DATA_VERSION;
3576             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
3577             written_len += write(file_fd, data, sizeof(uint32_t));
3578             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
3579             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
3580             written_len += write(file_fd, data, sizeof(uint32_t));
3581             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
3582             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
3583             written_len += write(file_fd, data, sizeof(uint32_t));
3584             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
3585             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
3586             written_len += write(file_fd, data, sizeof(uint32_t));
3587             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
3588             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
3589             written_len += write(file_fd, data, sizeof(uint32_t));
3590             meta.tuning_mod3_data_size = 0;
3591             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
3592             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
3593             written_len += write(file_fd, data, sizeof(uint32_t));
3594             int total_size = meta.tuning_sensor_data_size;
3595             data = (void *)((uint8_t *)&meta.data);
3596             written_len += write(file_fd, data, total_size);
3597             total_size = meta.tuning_vfe_data_size;
3598             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
3599             written_len += write(file_fd, data, total_size);
3600             total_size = meta.tuning_cpp_data_size;
3601             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
3602             written_len += write(file_fd, data, total_size);
3603             total_size = meta.tuning_cac_data_size;
3604             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
3605             written_len += write(file_fd, data, total_size);
3606             close(file_fd);
3607         } else {
3608             ALOGE("%s: fail to open file for metadata dumping", __func__);
3609         }
3610     }
3611 }
3612 
3613 /*===========================================================================
3614  * FUNCTION   : cleanAndSortStreamInfo
3615  *
3616  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
3617  *              and sort them such that the raw stream is at the end of the list.
3618  *              This is a workaround for a camera daemon constraint.
3619  *
3620  * PARAMETERS : None
3621  *
3622  *==========================================================================*/
3623 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
3624 {
3625     List<stream_info_t *> newStreamInfo;
3626 
3627     /*clean up invalid streams*/
3628     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
3629             it != mStreamInfo.end();) {
3630         if(((*it)->status) == INVALID){
3631             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
3632             delete channel;
3633             free(*it);
3634             it = mStreamInfo.erase(it);
3635         } else {
3636             it++;
3637         }
3638     }
3639 
3640     // Move preview/video/callback/snapshot streams into newList
3641     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3642             it != mStreamInfo.end();) {
3643         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
3644                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
3645                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
3646             newStreamInfo.push_back(*it);
3647             it = mStreamInfo.erase(it);
3648         } else
3649             it++;
3650     }
3651     // Move raw streams into newList
3652     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3653             it != mStreamInfo.end();) {
3654         newStreamInfo.push_back(*it);
3655         it = mStreamInfo.erase(it);
3656     }
3657 
3658     mStreamInfo = newStreamInfo;
3659 }
3660 
3661 /*===========================================================================
3662  * FUNCTION   : extractJpegMetadata
3663  *
3664  * DESCRIPTION: helper method to extract JPEG metadata from a capture request.
3665  *              JPEG metadata is cached in the HAL and returned as part of the
3666  *              capture result when metadata is received from the camera daemon.
3667  *
3668  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
3669  *              @request:      capture request
3670  *
3671  *==========================================================================*/
3672 void QCamera3HardwareInterface::extractJpegMetadata(
3673         CameraMetadata& jpegMetadata,
3674         const camera3_capture_request_t *request)
3675 {
3676     CameraMetadata frame_settings;
3677     frame_settings = request->settings;
3678 
3679     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
3680         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
3681                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
3682                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
3683 
3684     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
3685         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
3686                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
3687                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
3688 
3689     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
3690         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
3691                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
3692                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
3693 
3694     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
3695         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
3696                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
3697                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
3698 
3699     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
3700         jpegMetadata.update(ANDROID_JPEG_QUALITY,
3701                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
3702                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
3703 
3704     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
3705         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
3706                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
3707                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
3708 
3709     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
3710         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
3711                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
3712                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
3713 }
3714 
3715 /*===========================================================================
3716  * FUNCTION   : convertToRegions
3717  *
3718  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
3719  *
3720  * PARAMETERS :
3721  *   @rect   : cam_rect_t struct to convert
3722  *   @region : int32_t destination array
3723  *   @weight : if we are converting from cam_area_t, weight is valid
3724  *             else weight = -1
3725  *
3726  *==========================================================================*/
3727 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
3728     region[0] = rect.left;
3729     region[1] = rect.top;
3730     region[2] = rect.left + rect.width;
3731     region[3] = rect.top + rect.height;
3732     if (weight > -1) {
3733         region[4] = weight;
3734     }
3735 }
3736 
3737 /*===========================================================================
3738  * FUNCTION   : convertFromRegions
3739  *
3740  * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
3741  *
3742  * PARAMETERS :
3743  *   @roi      : destination cam_area_t struct
3744  *   @settings : capture request settings containing the region entry
3745  *   @tag      : metadata tag of the region entry; its data is laid out as
3746  *               [x_min, y_min, x_max, y_max, weight]
3747  *
3748  *==========================================================================*/
3749 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
3750                                                    const camera_metadata_t *settings,
3751                                                    uint32_t tag){
3752     CameraMetadata frame_settings;
3753     frame_settings = settings;
3754     int32_t x_min = frame_settings.find(tag).data.i32[0];
3755     int32_t y_min = frame_settings.find(tag).data.i32[1];
3756     int32_t x_max = frame_settings.find(tag).data.i32[2];
3757     int32_t y_max = frame_settings.find(tag).data.i32[3];
3758     roi->weight = frame_settings.find(tag).data.i32[4];
3759     roi->rect.left = x_min;
3760     roi->rect.top = y_min;
3761     roi->rect.width = x_max - x_min;
3762     roi->rect.height = y_max - y_min;
3763 }
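/* Illustrative note (not part of the HAL code): the framework encodes a
 * metering region as five int32 values [x_min, y_min, x_max, y_max, weight],
 * and the two helpers above map between that layout and the cam_rect_t /
 * cam_area_t representation. With a hypothetical cam_rect_t of left=20,
 * top=30, width=100, height=100 and weight=5, convertToRegions() produces
 * {20, 30, 120, 130, 5}; convertFromRegions() applied to the same five values
 * recovers left=20, top=30, width=100, height=100, weight=5.
 */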
3764 
3765 /*===========================================================================
3766  * FUNCTION   : resetIfNeededROI
3767  *
3768  * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
3769  *              returns false if the roi lies completely outside that region
3770  *
3771  * PARAMETERS :
3772  *   @roi       : cam_area_t struct to resize
3773  *   @scalerCropRegion : cam_crop_region_t region to compare against
3774  *
3775  *
3776  *==========================================================================*/
3777 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
3778                                                  const cam_crop_region_t* scalerCropRegion)
3779 {
3780     int32_t roi_x_max = roi->rect.width + roi->rect.left;
3781     int32_t roi_y_max = roi->rect.height + roi->rect.top;
3782     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
3783     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
3784 
3785     /* According to the spec, weight = 0 indicates that the roi should be
3786      * disabled. Without this check, the validation below (whether the roi
3787      * lies inside the scaler crop region) would fail, the roi would not be
3788      * reset, and the algorithm would keep using a stale roi window.
3789      */
3790     if (roi->weight == 0) {
3791         return true;
3792     }
3793 
3794     if ((roi_x_max < scalerCropRegion->left) ||
3795         // right edge of roi window is left of scalar crop's left edge
3796         (roi_y_max < scalerCropRegion->top)  ||
3797         // bottom edge of roi window is above scalar crop's top edge
3798         (roi->rect.left > crop_x_max) ||
3799         // left edge of roi window is to the right of scalar crop's right edge
3800         (roi->rect.top > crop_y_max)){
3801         // top edge of roi window is below scalar crop's bottom edge
3802         return false;
3803     }
3804     if (roi->rect.left < scalerCropRegion->left) {
3805         roi->rect.left = scalerCropRegion->left;
3806     }
3807     if (roi->rect.top < scalerCropRegion->top) {
3808         roi->rect.top = scalerCropRegion->top;
3809     }
3810     if (roi_x_max > crop_x_max) {
3811         roi_x_max = crop_x_max;
3812     }
3813     if (roi_y_max > crop_y_max) {
3814         roi_y_max = crop_y_max;
3815     }
3816     roi->rect.width = roi_x_max - roi->rect.left;
3817     roi->rect.height = roi_y_max - roi->rect.top;
3818     return true;
3819 }
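/* Worked example with hypothetical values: given a scaler crop region of
 * (left=100, top=100, width=1000, height=800) and an roi of
 * (left=50, top=150, width=300, height=900) with weight=1:
 *   roi_x_max=350, roi_y_max=1050, crop_x_max=1100, crop_y_max=900
 * The roi overlaps the crop region, so it is clamped rather than rejected:
 * left is raised to 100 and roi_y_max is lowered to 900, yielding
 * (left=100, top=150, width=250, height=750) and a return value of true.
 */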
3820 
3821 /*===========================================================================
3822  * FUNCTION   : convertLandmarks
3823  *
3824  * DESCRIPTION: helper method to extract the landmarks from face detection info
3825  *
3826  * PARAMETERS :
3827  *   @face   : cam_face_detection_info_t struct containing the detected landmarks
3828  *   @landmarks : int32_t destination array
3829  *
3830  *
3831  *==========================================================================*/
3832 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
3833 {
3834     landmarks[0] = face.left_eye_center.x;
3835     landmarks[1] = face.left_eye_center.y;
3836     landmarks[2] = face.right_eye_center.x;
3837     landmarks[3] = face.right_eye_center.y;
3838     landmarks[4] = face.mouth_center.x;
3839     landmarks[5] = face.mouth_center.y;
3840 }
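/* Note (sketch, assuming the standard Android face-landmark layout): the
 * resulting array is ordered {leftEyeX, leftEyeY, rightEyeX, rightEyeY,
 * mouthX, mouthY}, which is the ordering the framework expects for
 * ANDROID_STATISTICS_FACE_LANDMARKS entries in the capture result.
 */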
3841 
3842 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
3843 /*===========================================================================
3844  * FUNCTION   : initCapabilities
3845  *
3846  * DESCRIPTION: initialize camera capabilities in static data struct
3847  *
3848  * PARAMETERS :
3849  *   @cameraId  : camera Id
3850  *
3851  * RETURN     : int32_t type of status
3852  *              NO_ERROR  -- success
3853  *              non-zero failure code
3854  *==========================================================================*/
3855 int QCamera3HardwareInterface::initCapabilities(int cameraId)
3856 {
3857     int rc = 0;
3858     mm_camera_vtbl_t *cameraHandle = NULL;
3859     QCamera3HeapMemory *capabilityHeap = NULL;
3860 
3861     cameraHandle = camera_open(cameraId);
3862     if (!cameraHandle) {
3863         ALOGE("%s: camera_open failed", __func__);
3864         rc = -1;
3865         goto open_failed;
3866     }
3867 
3868     capabilityHeap = new QCamera3HeapMemory();
3869     if (capabilityHeap == NULL) {
3870         ALOGE("%s: creation of capabilityHeap failed", __func__);
3871         goto heap_creation_failed;
3872     }
3873     /* Allocate memory for capability buffer */
3874     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
3875     if(rc != OK) {
3876         ALOGE("%s: No memory for capability", __func__);
3877         goto allocate_failed;
3878     }
3879 
3880     /* Map memory for capability buffer */
3881     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
3882     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
3883                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
3884                                 capabilityHeap->getFd(0),
3885                                 sizeof(cam_capability_t));
3886     if(rc < 0) {
3887         ALOGE("%s: failed to map capability buffer", __func__);
3888         goto map_failed;
3889     }
3890 
3891     /* Query Capability */
3892     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
3893     if(rc < 0) {
3894         ALOGE("%s: failed to query capability",__func__);
3895         goto query_failed;
3896     }
3897     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
3898     if (!gCamCapability[cameraId]) {
3899         ALOGE("%s: out of memory", __func__);
3900         goto query_failed;
3901     }
3902     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
3903                                         sizeof(cam_capability_t));
3904     rc = 0;
3905 
3906 query_failed:
3907     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
3908                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
3909 map_failed:
3910     capabilityHeap->deallocate();
3911 allocate_failed:
3912     delete capabilityHeap;
3913 heap_creation_failed:
3914     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
3915     cameraHandle = NULL;
3916 open_failed:
3917     return rc;
3918 }
3919 
3920 /*===========================================================================
3921  * FUNCTION   : initParameters
3922  *
3923  * DESCRIPTION: initialize camera parameters
3924  *
3925  * PARAMETERS :
3926  *
3927  * RETURN     : int32_t type of status
3928  *              NO_ERROR  -- success
3929  *              non-zero failure code
3930  *==========================================================================*/
3931 int QCamera3HardwareInterface::initParameters()
3932 {
3933     int rc = 0;
3934 
3935     //Allocate Set Param Buffer
3936     mParamHeap = new QCamera3HeapMemory();
3937     rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
3938     if(rc != OK) {
3939         rc = NO_MEMORY;
3940         ALOGE("Failed to allocate SETPARM Heap memory");
3941         delete mParamHeap;
3942         mParamHeap = NULL;
3943         return rc;
3944     }
3945 
3946     //Map memory for parameters buffer
3947     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
3948             CAM_MAPPING_BUF_TYPE_PARM_BUF,
3949             mParamHeap->getFd(0),
3950             sizeof(metadata_buffer_t));
3951     if(rc < 0) {
3952         ALOGE("%s:failed to map SETPARM buffer",__func__);
3953         rc = FAILED_TRANSACTION;
3954         mParamHeap->deallocate();
3955         delete mParamHeap;
3956         mParamHeap = NULL;
3957         return rc;
3958     }
3959 
3960     mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
3961     return rc;
3962 }
3963 
3964 /*===========================================================================
3965  * FUNCTION   : deinitParameters
3966  *
3967  * DESCRIPTION: de-initialize camera parameters
3968  *
3969  * PARAMETERS :
3970  *
3971  * RETURN     : NONE
3972  *==========================================================================*/
3973 void QCamera3HardwareInterface::deinitParameters()
3974 {
3975     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
3976             CAM_MAPPING_BUF_TYPE_PARM_BUF);
3977 
3978     mParamHeap->deallocate();
3979     delete mParamHeap;
3980     mParamHeap = NULL;
3981 
3982     mParameters = NULL;
3983 }
3984 
3985 /*===========================================================================
3986  * FUNCTION   : calcMaxJpegSize
3987  *
3988  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
3989  *
3990  * PARAMETERS :
3991  *
3992  * RETURN     : max_jpeg_size
3993  *==========================================================================*/
3994 int QCamera3HardwareInterface::calcMaxJpegSize(uint8_t camera_id)
3995 {
3996     int32_t max_jpeg_size = 0;
3997     int temp_width, temp_height;
3998     for (int i = 0; i < gCamCapability[camera_id]->picture_sizes_tbl_cnt; i++) {
3999         temp_width = gCamCapability[camera_id]->picture_sizes_tbl[i].width;
4000         temp_height = gCamCapability[camera_id]->picture_sizes_tbl[i].height;
4001         if (temp_width * temp_height > max_jpeg_size ) {
4002             max_jpeg_size = temp_width * temp_height;
4003         }
4004     }
4005     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
4006     return max_jpeg_size;
4007 }
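/* Worked example with a hypothetical 13MP sensor: if the largest picture size
 * is 4160x3120, then max_jpeg_size = 4160 * 3120 * 3 / 2
 * + sizeof(camera3_jpeg_blob_t) = 19468800 bytes plus the blob trailer,
 * i.e. roughly 18.6 MB; this value is later published as ANDROID_JPEG_MAX_SIZE.
 */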
4008 
4009 /*===========================================================================
4010  * FUNCTION   : getMaxRawSize
4011  *
4012  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
4013  *
4014  * PARAMETERS :
4015  *
4016  * RETURN     : Largest supported Raw Dimension
4017  *==========================================================================*/
4018 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint8_t camera_id)
4019 {
4020     int max_width = 0;
4021     cam_dimension_t maxRawSize;
4022 
4023     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
4024     for (int i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
4025         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
4026             max_width = gCamCapability[camera_id]->raw_dim[i].width;
4027             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
4028         }
4029     }
4030     return maxRawSize;
4031 }
4032 
4033 
4034 /*===========================================================================
4035  * FUNCTION   : calcMaxJpegDim
4036  *
4037  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
4038  *
4039  * PARAMETERS :
4040  *
4041  * RETURN     : max_jpeg_dim
4042  *==========================================================================*/
4043 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
4044 {
4045     cam_dimension_t max_jpeg_dim;
4046     cam_dimension_t curr_jpeg_dim;
4047     max_jpeg_dim.width = 0;
4048     max_jpeg_dim.height = 0;
4049     curr_jpeg_dim.width = 0;
4050     curr_jpeg_dim.height = 0;
4051     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
4052         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
4053         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
4054         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
4055             max_jpeg_dim.width * max_jpeg_dim.height ) {
4056             max_jpeg_dim.width = curr_jpeg_dim.width;
4057             max_jpeg_dim.height = curr_jpeg_dim.height;
4058         }
4059     }
4060     return max_jpeg_dim;
4061 }
4062 
4063 
4064 /*===========================================================================
4065  * FUNCTION   : initStaticMetadata
4066  *
4067  * DESCRIPTION: initialize the static metadata
4068  *
4069  * PARAMETERS :
4070  *   @cameraId  : camera Id
4071  *
4072  * RETURN     : int32_t type of status
4073  *              0  -- success
4074  *              non-zero failure code
4075  *==========================================================================*/
4076 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
4077 {
4078     int rc = 0;
4079     CameraMetadata staticInfo;
4080 
4081     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
4082     if (!facingBack)
4083         gCamCapability[cameraId]->supported_raw_dim_cnt = 0;
4084 
4085      /* android.info: hardware level */
4086     uint8_t supportedHardwareLevel = (facingBack)? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
4087       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
4088     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
4089         &supportedHardwareLevel, 1);
4090     /*HAL 3 only*/
4091     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
4092                     &gCamCapability[cameraId]->min_focus_distance, 1);
4093 
4094     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
4095                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
4096 
4097     /*should be using focal lengths but sensor doesn't provide that info now*/
4098     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4099                       &gCamCapability[cameraId]->focal_length,
4100                       1);
4101 
4102     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4103                       gCamCapability[cameraId]->apertures,
4104                       gCamCapability[cameraId]->apertures_count);
4105 
4106     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4107                 gCamCapability[cameraId]->filter_densities,
4108                 gCamCapability[cameraId]->filter_densities_count);
4109 
4110 
4111     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4112                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
4113                       gCamCapability[cameraId]->optical_stab_modes_count);
4114 
4115     staticInfo.update(ANDROID_LENS_POSITION,
4116                       gCamCapability[cameraId]->lens_position,
4117                       sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
4118 
4119     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
4120                                        gCamCapability[cameraId]->lens_shading_map_size.height};
4121     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
4122                       lens_shading_map_size,
4123                       sizeof(lens_shading_map_size)/sizeof(int32_t));
4124 
4125     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
4126             gCamCapability[cameraId]->sensor_physical_size, 2);
4127 
4128     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
4129             gCamCapability[cameraId]->exposure_time_range, 2);
4130 
4131     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
4132             &gCamCapability[cameraId]->max_frame_duration, 1);
4133 
4134     camera_metadata_rational baseGainFactor = {
4135             gCamCapability[cameraId]->base_gain_factor.numerator,
4136             gCamCapability[cameraId]->base_gain_factor.denominator};
4137     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
4138                       &baseGainFactor, 1);
4139 
4140     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
4141                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
4142 
4143     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
4144                                   gCamCapability[cameraId]->pixel_array_size.height};
4145     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
4146                       pixel_array_size, 2);
4147 
4148     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
4149                                                 gCamCapability[cameraId]->active_array_size.top,
4150                                                 gCamCapability[cameraId]->active_array_size.width,
4151                                                 gCamCapability[cameraId]->active_array_size.height};
4152     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
4153                       active_array_size, 4);
4154 
4155     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
4156             &gCamCapability[cameraId]->white_level, 1);
4157 
4158     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
4159             gCamCapability[cameraId]->black_level_pattern, 4);
4160 
4161     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
4162                       &gCamCapability[cameraId]->flash_charge_duration, 1);
4163 
4164     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
4165                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
4166 
4167     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
4168     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
4169                       (int32_t*)&maxFaces, 1);
4170 
4171     uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
4172     if (0 && gCamCapability[cameraId]->isTimestampCalibrated) {
4173         timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
4174     }
4175     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4176             &timestampSource, 1);
4177 
4178     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
4179                       &gCamCapability[cameraId]->histogram_size, 1);
4180 
4181     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
4182             &gCamCapability[cameraId]->max_histogram_count, 1);
4183 
4184     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
4185                                     gCamCapability[cameraId]->sharpness_map_size.height};
4186 
4187     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
4188             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
4189 
4190     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
4191             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
4192 
4193     int32_t scalar_formats[] = {
4194             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
4195             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
4196             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
4197             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
4198             HAL_PIXEL_FORMAT_RAW10,
4199             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
4200     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
4201     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
4202                       scalar_formats,
4203                       scalar_formats_count);
4204 
4205     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
4206     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
4207               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
4208               available_processed_sizes);
4209     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
4210                 available_processed_sizes,
4211                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
4212 
4213     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
4214     makeTable(gCamCapability[cameraId]->raw_dim,
4215               gCamCapability[cameraId]->supported_raw_dim_cnt,
4216               available_raw_sizes);
4217     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
4218                 available_raw_sizes,
4219                 gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
4220 
4221     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
4222     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
4223                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
4224                  available_fps_ranges);
4225     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4226             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
4227 
4228     camera_metadata_rational exposureCompensationStep = {
4229             gCamCapability[cameraId]->exp_compensation_step.numerator,
4230             gCamCapability[cameraId]->exp_compensation_step.denominator};
4231     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
4232                       &exposureCompensationStep, 1);
4233 
4234     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
4235     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4236                       availableVstabModes, sizeof(availableVstabModes));
4237 
4238     /*HAL 1 and HAL 3 common*/
4239     float maxZoom = 4;
4240     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
4241             &maxZoom, 1);
4242 
4243     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
4244     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
4245 
4246     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
4247     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
4248         max3aRegions[2] = 0; /* AF not supported */
4249     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
4250             max3aRegions, 3);
4251 
4252     uint8_t availableFaceDetectModes[] = {
4253             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
4254             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
4255     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
4256                       availableFaceDetectModes,
4257                       sizeof(availableFaceDetectModes));
4258 
4259     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
4260                                            gCamCapability[cameraId]->exposure_compensation_max};
4261     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
4262             exposureCompensationRange,
4263             sizeof(exposureCompensationRange)/sizeof(int32_t));
4264 
4265     uint8_t lensFacing = (facingBack) ?
4266             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
4267     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
4268 
4269     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4270                       available_thumbnail_sizes,
4271                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
4272 
4273     /*all sizes will be clubbed into this tag*/
4274     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
4275     uint8_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
4276             (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2,
4277              MAX_SIZES_CNT * 2,
4278              gCamCapability[cameraId]->active_array_size,
4279              gCamCapability[cameraId]->max_downscale_factor);
4280     /*android.scaler.availableStreamConfigurations*/
4281     int32_t max_stream_configs_size =
4282             gCamCapability[cameraId]->picture_sizes_tbl_cnt *
4283             sizeof(scalar_formats)/sizeof(int32_t) * 4;
4284     int32_t available_stream_configs[max_stream_configs_size];
4285     int idx = 0;
4286     for (int j = 0; j < scalar_formats_count; j++) {
4287         switch (scalar_formats[j]) {
4288         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
4289         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
4290         case HAL_PIXEL_FORMAT_RAW10:
4291             for (int i = 0;
4292                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4293                 available_stream_configs[idx] = scalar_formats[j];
4294                 available_stream_configs[idx+1] =
4295                     gCamCapability[cameraId]->raw_dim[i].width;
4296                 available_stream_configs[idx+2] =
4297                     gCamCapability[cameraId]->raw_dim[i].height;
4298                 available_stream_configs[idx+3] =
4299                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4300                 idx+=4;
4301             }
4302             break;
4303         case HAL_PIXEL_FORMAT_BLOB:
4304             for (int i = 0; i < jpeg_sizes_cnt/2; i++) {
4305                 available_stream_configs[idx] = scalar_formats[j];
4306                 available_stream_configs[idx+1] = available_jpeg_sizes[i*2];
4307                 available_stream_configs[idx+2] = available_jpeg_sizes[i*2+1];
4308                 available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4309                 idx+=4;
4310             }
4311             break;
4312         default:
4313             for (int i = 0;
4314                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4315                 available_stream_configs[idx] = scalar_formats[j];
4316                 available_stream_configs[idx+1] =
4317                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4318                 available_stream_configs[idx+2] =
4319                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4320                 available_stream_configs[idx+3] =
4321                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4322                 idx+=4;
4323             }
4324 
4325 
4326             break;
4327         }
4328     }
4329     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
4330                       available_stream_configs, idx);
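    /* Each entry written above is a (format, width, height, direction) quadruple.
     * As an illustration with hypothetical values, a single 1920x1080
     * YCbCr_420_888 output configuration would be encoded as
     * { ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 1920, 1080,
     *   ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }.
     */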
4331     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4332     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4333 
4334     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4335     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4336 
4337     /* android.scaler.availableMinFrameDurations */
4338     int64_t available_min_durations[max_stream_configs_size];
4339     idx = 0;
4340     for (int j = 0; j < scalar_formats_count; j++) {
4341         switch (scalar_formats[j]) {
4342         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
4343         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
4344         case HAL_PIXEL_FORMAT_RAW10:
4345             for (int i = 0;
4346                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4347                 available_min_durations[idx] = scalar_formats[j];
4348                 available_min_durations[idx+1] =
4349                     gCamCapability[cameraId]->raw_dim[i].width;
4350                 available_min_durations[idx+2] =
4351                     gCamCapability[cameraId]->raw_dim[i].height;
4352                 available_min_durations[idx+3] =
4353                     gCamCapability[cameraId]->raw_min_duration[i];
4354                 idx+=4;
4355             }
4356             break;
4357         default:
4358             for (int i = 0;
4359                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4360                 available_min_durations[idx] = scalar_formats[j];
4361                 available_min_durations[idx+1] =
4362                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4363                 available_min_durations[idx+2] =
4364                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4365                 available_min_durations[idx+3] =
4366                     gCamCapability[cameraId]->picture_min_duration[i];
4367                 idx+=4;
4368             }
4369             break;
4370         }
4371     }
4372     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
4373                       &available_min_durations[0], idx);
4374 
4375     int32_t max_jpeg_size = calcMaxJpegSize(cameraId);
4376     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
4377                       &max_jpeg_size, 1);
4378 
4379     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
4380     size_t size = 0;
4381     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
4382         int32_t val = lookupFwkName(EFFECT_MODES_MAP,
4383                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
4384                                    gCamCapability[cameraId]->supported_effects[i]);
4385         if (val != NAME_NOT_FOUND) {
4386             avail_effects[size] = (uint8_t)val;
4387             size++;
4388         }
4389     }
4390     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
4391                       avail_effects,
4392                       size);
4393 
4394     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
4395     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
4396     int32_t supported_scene_modes_cnt = 0;
4397     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
4398         int32_t val = lookupFwkName(SCENE_MODES_MAP,
4399                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4400                                 gCamCapability[cameraId]->supported_scene_modes[i]);
4401         if (val != NAME_NOT_FOUND) {
4402             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
4403             supported_indexes[supported_scene_modes_cnt] = i;
4404             supported_scene_modes_cnt++;
4405         }
4406     }
4407 
4408     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4409                       avail_scene_modes,
4410                       supported_scene_modes_cnt);
4411 
4412     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
4413     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
4414                       supported_scene_modes_cnt,
4415                       scene_mode_overrides,
4416                       supported_indexes,
4417                       cameraId);
4418     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
4419                       scene_mode_overrides,
4420                       supported_scene_modes_cnt*3);
4421 
4422     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
4423     size = 0;
4424     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
4425         int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
4426                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
4427                                  gCamCapability[cameraId]->supported_antibandings[i]);
4428         if (val != NAME_NOT_FOUND) {
4429             avail_antibanding_modes[size] = (uint8_t)val;
4430             size++;
4431         }
4432 
4433     }
4434     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4435                       avail_antibanding_modes,
4436                       size);
4437 
4438     uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
4439     size = 0;
4440     if (0 == gCamCapability[cameraId]->aberration_modes_count) {
4441         avail_abberation_modes[0] =
4442                 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
4443         size++;
4444     } else {
4445         for (size_t i = 0; i < gCamCapability[cameraId]->aberration_modes_count; i++) {
4446             int32_t val = lookupFwkName(COLOR_ABERRATION_MAP,
4447                     sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
4448                     gCamCapability[cameraId]->aberration_modes[i]);
4449             if (val != NAME_NOT_FOUND) {
4450                 avail_abberation_modes[size] = (uint8_t)val;
4451                 size++;
4452             } else {
4453                 ALOGE("%s: Invalid CAC mode %d", __func__,
4454                         gCamCapability[cameraId]->aberration_modes[i]);
4455                 break;
4456             }
4457         }
4458 
4459     }
4460     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4461             avail_abberation_modes,
4462             size);
4463 
4464     char cafProp[PROPERTY_VALUE_MAX];
4465     memset(cafProp, 0, sizeof(cafProp));
4466     property_get("persist.camera.caf.disable", cafProp, "0");
4467     uint8_t cafDisabled = atoi(cafProp);
4468 
4469     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
4470     size = 0;
4471     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
4472         if (cafDisabled &&
4473             ((gCamCapability[cameraId]->supported_focus_modes[i]
4474               == CAM_FOCUS_MODE_CONTINOUS_PICTURE) ||
4475              (gCamCapability[cameraId]->supported_focus_modes[i]
4476               == CAM_FOCUS_MODE_CONTINOUS_VIDEO)))
4477             continue;
4478 
4479         int32_t val = lookupFwkName(FOCUS_MODES_MAP,
4480                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4481                                 gCamCapability[cameraId]->supported_focus_modes[i]);
4482         if (val != NAME_NOT_FOUND) {
4483             avail_af_modes[size] = (uint8_t)val;
4484             size++;
4485         }
4486     }
4487     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
4488                       avail_af_modes,
4489                       size);
4490 
4491     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
4492     size = 0;
4493     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
4494         int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4495                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4496                                     gCamCapability[cameraId]->supported_white_balances[i]);
4497         if (val != NAME_NOT_FOUND) {
4498             avail_awb_modes[size] = (uint8_t)val;
4499             size++;
4500         }
4501     }
4502     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
4503                       avail_awb_modes,
4504                       size);
4505 
4506     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
4507     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
4508       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
4509 
4510     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
4511             available_flash_levels,
4512             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
4513 
4514     uint8_t flashAvailable;
4515     if (gCamCapability[cameraId]->flash_available)
4516         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
4517     else
4518         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
4519     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
4520             &flashAvailable, 1);
4521 
4522     uint8_t avail_ae_modes[5];
4523     size = 0;
4524     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
4525         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
4526         size++;
4527     }
4528     if (flashAvailable) {
4529         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
4530         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
4531         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4532     }
4533     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
4534                       avail_ae_modes,
4535                       size);
4536 
4537     int32_t sensitivity_range[2];
4538     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
4539     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
4540     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
4541                       sensitivity_range,
4542                       sizeof(sensitivity_range) / sizeof(int32_t));
4543 
4544     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
4545                       &gCamCapability[cameraId]->max_analog_sensitivity,
4546                       1);
4547 
4548     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
4549     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
4550                       &sensor_orientation,
4551                       1);
4552 
4553     int32_t max_output_streams[3] = {
4554             MAX_STALLING_STREAMS,
4555             MAX_PROCESSED_STREAMS,
4556             MAX_RAW_STREAMS};
4557     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
4558                       max_output_streams,
4559                       3);
4560 
4561     uint8_t avail_leds = 0;
4562     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
4563                       &avail_leds, 0);
4564 
4565     uint8_t focus_dist_calibrated;
4566     int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
4567             sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
4568             gCamCapability[cameraId]->focus_dist_calibrated);
4569     if (val != NAME_NOT_FOUND) {
4570         focus_dist_calibrated = (uint8_t)val;
4571         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
4572                      &focus_dist_calibrated, 1);
4573     }
4574 
4575     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
4576     size = 0;
4577     for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
4578             i++) {
4579         int32_t val = lookupFwkName(TEST_PATTERN_MAP,
4580                                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
4581                                     gCamCapability[cameraId]->supported_test_pattern_modes[i]);
4582         if (val != NAME_NOT_FOUND) {
4583             avail_testpattern_modes[size] = val;
4584             size++;
4585         }
4586     }
4587     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
4588                       avail_testpattern_modes,
4589                       size);
4590 
4591     uint8_t max_pipeline_depth = MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY;
4592     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
4593                       &max_pipeline_depth,
4594                       1);
4595 
4596     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
4597     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
4598                       &partial_result_count,
4599                        1);
4600 
4601     uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
4602     uint8_t available_capabilities_count = 0;
4603     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
4604     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
4605     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
4606     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS;
4607     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE;
4608     if (facingBack) {
4609         available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW;
4610     }
4611     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
4612                       available_capabilities,
4613                       available_capabilities_count);
4614 
4615     int32_t max_input_streams = 0;
4616     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
4617                       &max_input_streams,
4618                       1);
4619 
4620     int32_t io_format_map[] = {};
4621     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
4622                       io_format_map, 0);
4623 
4624     int32_t max_latency = (facingBack)? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:CAM_MAX_SYNC_LATENCY;
4625     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
4626                       &max_latency,
4627                       1);
4628 
4629     float optical_axis_angle[2];
4630     optical_axis_angle[0] = 0; //need to verify
4631     optical_axis_angle[1] = 0; //need to verify
4632     staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
4633                       optical_axis_angle,
4634                       2);
4635 
4636     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
4637     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
4638                       available_hot_pixel_modes,
4639                       1);
4640 
4641     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
4642                                       ANDROID_EDGE_MODE_FAST};
4643     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
4644                       available_edge_modes,
4645                       2);
4646 
4647     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
4648                                            ANDROID_NOISE_REDUCTION_MODE_FAST};
4649     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
4650                       available_noise_red_modes,
4651                       2);
4652 
4653     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
4654                                          ANDROID_TONEMAP_MODE_FAST};
4655     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
4656                       available_tonemap_modes,
4657                       2);
4658 
4659     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
4660     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
4661                       available_hot_pixel_map_modes,
4662                       1);
4663 
4664     uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
4665         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
4666         gCamCapability[cameraId]->reference_illuminant1);
4667     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
4668                       &fwkReferenceIlluminant, 1);
4669 
4670     fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
4671         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
4672         gCamCapability[cameraId]->reference_illuminant2);
4673     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
4674                       &fwkReferenceIlluminant, 1);
4675 
4676     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
4677                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
4678                       3*3);
4679 
4680     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
4681                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
4682                       3*3);
4683 
4684     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
4685                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
4686                       3*3);
4687 
4688     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
4689                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
4690                       3*3);
4691 
4692     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
4693                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
4694                       3*3);
4695 
4696     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
4697                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
4698                       3*3);
4699 
4700     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
4701        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
4702        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
4703        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4704        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
4705        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4706        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
4707        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
4708        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
4709        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
4710        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
4711        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
4712        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
4713        ANDROID_JPEG_GPS_COORDINATES,
4714        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
4715        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
4716        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
4717        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
4718        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
4719        ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
4720        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
4721        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
4722        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
4723        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
4724        ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
4725        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4726        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
4727        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4728        ANDROID_BLACK_LEVEL_LOCK };
4729 
4730     size_t request_keys_cnt =
4731             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
4732     //NOTE: Please increase available_request_keys array size before
4733     //adding any new entries.
4734     int32_t available_request_keys[request_keys_cnt+1];
4735     memcpy(available_request_keys, request_keys_basic,
4736             sizeof(request_keys_basic));
4737     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4738         available_request_keys[request_keys_cnt++] =
4739                 ANDROID_CONTROL_AF_REGIONS;
4740     }
4741     //NOTE: Please increase available_request_keys array size before
4742     //adding any new entries.
4743     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
4744                       available_request_keys, request_keys_cnt);
4745 
4746     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
4747        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
4748        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
4749        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
4750        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
4751        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
4752        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
4753        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
4754        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
4755        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
4756        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
4757        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
4758        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
4759        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
4760        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4761        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
4762        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4763        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
4764        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4765        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4766        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
4767        ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
4768        ANDROID_STATISTICS_FACE_SCORES};
4769     size_t result_keys_cnt =
4770             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
4771     //NOTE: Please increase available_result_keys array size before
4772     //adding any new entries.
4773     int32_t available_result_keys[result_keys_cnt+3];
4774     memcpy(available_result_keys, result_keys_basic,
4775             sizeof(result_keys_basic));
4776     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4777         available_result_keys[result_keys_cnt++] =
4778                 ANDROID_CONTROL_AF_REGIONS;
4779     }
4780     if (facingBack) {
4781        available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_NOISE_PROFILE;
4782        available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_GREEN_SPLIT;
4783     }
4784     //NOTE: Please increase available_result_keys array size before
4785     //adding any new entries.
4786 
4787     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4788                       available_result_keys, result_keys_cnt);
4789 
4790     int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4791        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4792        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
4793        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
4794        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4795        ANDROID_SCALER_CROPPING_TYPE,
4796        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4797        ANDROID_SYNC_MAX_LATENCY,
4798        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4799        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4800        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
4801        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
4802        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4803        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4804        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4805        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4806        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4807        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
4808        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
4809        ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
4810        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
4811        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
4812        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4813        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
4814        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
4815        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
4816        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
4817        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
4818        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
4819        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
4820        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
4821        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
4822        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
4823        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
4824        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
4825        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
4826        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
4827        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
4828        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
4829        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
4830        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
4831        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
4832        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
4833        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
4834        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
4835        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
4836        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
4837        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
4838        ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
4839     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
4840                       available_characteristics_keys,
4841                       sizeof(available_characteristics_keys)/sizeof(int32_t));
4842 
4843     /*available stall durations depend on the hw + sw and will be different for different devices */
4844     /*have to add for raw after implementation*/
4845     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
4846     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
4847 
4848     size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
4849     int64_t available_stall_durations[available_stall_size];
4850     idx = 0;
4851     for (uint32_t j = 0; j < stall_formats_count; j++) {
4852        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
4853           for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4854              available_stall_durations[idx]   = stall_formats[j];
4855              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4856              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4857              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
4858              idx+=4;
4859           }
4860        } else {
4861           for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4862              available_stall_durations[idx]   = stall_formats[j];
4863              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
4864              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
4865              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
4866              idx+=4;
4867           }
4868        }
4869     }
4870     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
4871                       available_stall_durations,
4872                       idx);
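    /*
     * Illustrative sketch (not part of the HAL): each stall-duration entry above
     * is a flattened 4-tuple of {format, width, height, duration}, e.g. for a
     * hypothetical 4000x3000 JPEG size with a 300 ms stall:
     *
     *   int64_t example_entry[4] = {
     *       HAL_PIXEL_FORMAT_BLOB,   // format
     *       4000,                    // width  (hypothetical)
     *       3000,                    // height (hypothetical)
     *       300000000LL              // stall duration in ns (hypothetical)
     *   };
     *
     * The real values come from gCamCapability[cameraId] as shown above.
     */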
4873     //QCAMERA3_OPAQUE_RAW
4874     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
4875     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
4876     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
4877     case LEGACY_RAW:
4878         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
4879             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
4880         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
4881             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
4882         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
4883             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
4884         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
4885         break;
4886     case MIPI_RAW:
4887         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
4888             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
4889         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
4890             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
4891         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
4892             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
4893         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
4894         break;
4895     default:
4896         ALOGE("%s: unknown opaque_raw_format %d", __func__,
4897                 gCamCapability[cameraId]->opaque_raw_fmt);
4898         break;
4899     }
4900     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
4901 
4902     if (gCamCapability[cameraId]->supported_raw_dim_cnt) {
4903         int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
4904         for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4905             cam_stream_buf_plane_info_t buf_planes;
4906             strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
4907             strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
4908             mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
4909                 &gCamCapability[cameraId]->padding_info, &buf_planes);
4910             strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
4911         }
4912         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
4913                 3*gCamCapability[cameraId]->supported_raw_dim_cnt);
4914     }
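    /*
     * Illustrative sketch (not part of the HAL): QCAMERA3_OPAQUE_RAW_STRIDES is a
     * flat list of {width, height, stride} triples, one per supported raw
     * dimension. A consumer could walk it roughly like this (layout as published
     * above; the exact client-side accessor may differ):
     *
     *   camera_metadata_entry e = staticInfo.find(QCAMERA3_OPAQUE_RAW_STRIDES);
     *   for (size_t k = 0; k + 2 < e.count; k += 3) {
     *       int32_t w      = e.data.i32[k];
     *       int32_t h      = e.data.i32[k + 1];
     *       int32_t stride = e.data.i32[k + 2];
     *   }
     */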
4915     gStaticMetadata[cameraId] = staticInfo.release();
4916     return rc;
4917 }
4918 
4919 /*===========================================================================
4920  * FUNCTION   : makeTable
4921  *
4922  * DESCRIPTION: make a table of sizes
4923  *
4924  * PARAMETERS :
4925  *
4926  *
4927  *==========================================================================*/
4928 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
4929                                           int32_t* sizeTable)
4930 {
4931     int j = 0;
4932     for (int i = 0; i < size; i++) {
4933         sizeTable[j] = dimTable[i].width;
4934         sizeTable[j+1] = dimTable[i].height;
4935         j+=2;
4936     }
4937 }
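/*
 * Illustrative usage sketch (not part of the HAL): makeTable() flattens
 * {width, height} pairs into an int32_t list suitable for size-list metadata
 * tags. With two hypothetical dimensions:
 *
 *   cam_dimension_t dims[2] = {{1920, 1080}, {1280, 720}};
 *   int32_t sizes[4];
 *   makeTable(dims, 2, sizes);   // sizes == {1920, 1080, 1280, 720}
 */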
4938 
4939 /*===========================================================================
4940  * FUNCTION   : makeFPSTable
4941  *
4942  * DESCRIPTION: make a table of fps ranges
4943  *
4944  * PARAMETERS :
4945  *
4946  *==========================================================================*/
4947 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
4948                                           int32_t* fpsRangesTable)
4949 {
4950     int j = 0;
4951     for (int i = 0; i < size; i++) {
4952         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
4953         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
4954         j+=2;
4955     }
4956 }
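/*
 * Illustrative usage sketch (not part of the HAL): makeFPSTable() converts the
 * float {min_fps, max_fps} ranges to int32_t pairs, so a hypothetical range of
 * {15.0, 30.0} becomes the two consecutive entries {15, 30} in fpsRangesTable.
 */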
4957 
4958 /*===========================================================================
4959  * FUNCTION   : makeOverridesList
4960  *
4961  * DESCRIPTION: make a list of scene mode overrides
4962  *
4963  * PARAMETERS :
4964  *
4965  *
4966  *==========================================================================*/
4967 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
4968                                                   uint8_t size, uint8_t* overridesList,
4969                                                   uint8_t* supported_indexes,
4970                                                   int camera_id)
4971 {
4972     /*daemon will give a list of overrides for all scene modes.
4973       However we should send the fwk only the overrides for the scene modes
4974       supported by the framework*/
4975     int j = 0, index = 0, supt = 0;
4976     uint8_t focus_override;
4977     for (int i = 0; i < size; i++) {
4978         supt = 0;
4979         index = supported_indexes[i];
4980         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
4981         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
4982                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4983                                                     overridesTable[index].awb_mode);
4984         focus_override = (uint8_t)overridesTable[index].af_mode;
4985         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
4986            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
4987               supt = 1;
4988               break;
4989            }
4990         }
4991         if (supt) {
4992            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
4993                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4994                                               focus_override);
4995         } else {
4996            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
4997         }
4998         j+=3;
4999     }
5000 }
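/*
 * Illustrative sketch (not part of the HAL): for each supported scene mode the
 * overrides list carries a 3-tuple {AE override, AWB override, AF override}.
 * On a device with flash and a hypothetical auto-WB / continuous-picture-AF
 * override, one tuple would read:
 *
 *   { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 *     ANDROID_CONTROL_AWB_MODE_AUTO,
 *     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }
 */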
5001 
5002 /*===========================================================================
5003  * FUNCTION   : filterJpegSizes
5004  *
5005  * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
5006  *              remain at least as large as the downscaled active array
5007  *
5008  * PARAMETERS :
5009  *
5010  * RETURN     : length of jpegSizes array
5011  *==========================================================================*/
5012 
5013 uint8_t QCamera3HardwareInterface::filterJpegSizes(int32_t* jpegSizes, int32_t* processedSizes,
5014                                                    uint8_t processedSizesCnt,
5015                                                    uint8_t maxCount,
5016                                                    cam_rect_t active_array_size,
5017                                                    uint8_t downscale_factor)
5018 {
5019    if (downscale_factor == 0) {
5020       downscale_factor = 1;
5021    }
5022     int32_t min_width = active_array_size.width / downscale_factor;
5023     int32_t min_height = active_array_size.height / downscale_factor;
5024     uint8_t jpegSizesCnt = 0;
5025     if (processedSizesCnt > maxCount) {
5026         processedSizesCnt = maxCount;
5027     }
5028     for (int i = 0; i < processedSizesCnt; i+=2) {
5029         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
5030             jpegSizes[jpegSizesCnt] = processedSizes[i];
5031             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
5032             jpegSizesCnt += 2;
5033         }
5034     }
5035     return jpegSizesCnt;
5036 }
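/*
 * Illustrative usage sketch (not part of the HAL): with a hypothetical
 * 4000x3000 active array and downscale_factor == 2, only processed sizes of at
 * least 2000x1500 survive, so an input list {4000,3000, 1920,1080} yields the
 * output {4000,3000} and a return value of 2 (int32_t entries written).
 */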
5037 
5038 /*===========================================================================
5039  * FUNCTION   : getScalarFormat
5040  *
5041  * DESCRIPTION: convert the backend format to a type recognized by the framework
5042  *
5043  * PARAMETERS : format : the format from backend
5044  *
5045  * RETURN     : format recognized by framework
5046  *
5047  *==========================================================================*/
5048 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
5049 {
5050     int32_t halPixelFormat;
5051 
5052     switch (format) {
5053     case CAM_FORMAT_YUV_420_NV12:
5054         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5055         break;
5056     case CAM_FORMAT_YUV_420_NV21:
5057         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5058         break;
5059     case CAM_FORMAT_YUV_420_NV21_ADRENO:
5060         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
5061         break;
5062     case CAM_FORMAT_YUV_420_YV12:
5063         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
5064         break;
5065     case CAM_FORMAT_YUV_422_NV16:
5066     case CAM_FORMAT_YUV_422_NV61:
5067     default:
5068         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5069         break;
5070     }
5071     return halPixelFormat;
5072 }
5073 /*===========================================================================
5074  * FUNCTION   : computeNoiseModelEntryS
5075  *
5076  * DESCRIPTION: function to map a given sensitivity to the S noise
5077  *              model parameters in the DNG noise model.
5078  *
5079  * PARAMETERS : sens : the sensor sensitivity
5080  *
5081  * RETURN     : S (sensor amplification) noise
5082  *
5083  *==========================================================================*/
5084 
5085 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
5086    double s = 3.738032e-06 * sens + 3.651935e-04;
5087    return s < 0.0 ? 0.0 : s;
5088 }
5089 
5090 /*===========================================================================
5091  * FUNCTION   : computeNoiseModelEntryO
5092  *
5093  * DESCRIPTION: function to map a given sensitivity to the O noise
5094  *              model parameters in the DNG noise model.
5095  *
5096  * PARAMETERS : sens : the sensor sensitivity
5097  *
5098  * RETURN     : O (sensor readout) noise
5099  *
5100  *==========================================================================*/
5101 
5102 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
5103   double o = 4.499952e-07 * sens + -2.968624e-04;
5104   return o < 0.0 ? 0.0 : o;
5105 }
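/*
 * Illustrative sketch (not part of the HAL): the S and O entries above feed the
 * per-channel sensor noise model N(x) = sqrt(S * x + O) reported via
 * ANDROID_SENSOR_NOISE_PROFILE. For a hypothetical sensitivity of ISO 400:
 *
 *   double S = computeNoiseModelEntryS(400);   // ~1.86e-03
 *   double O = computeNoiseModelEntryO(400);   // ~-1.17e-04 before clamping, so 0.0
 */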
5106 
5107 /*===========================================================================
5108  * FUNCTION   : getSensorSensitivity
5109  *
5110  * DESCRIPTION: convert iso_mode to an integer value
5111  *
5112  * PARAMETERS : iso_mode : the iso_mode supported by sensor
5113  *
5114  * RETURN     : sensitivity supported by sensor
5115  *
5116  *==========================================================================*/
5117 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
5118 {
5119     int32_t sensitivity;
5120 
5121     switch (iso_mode) {
5122     case CAM_ISO_MODE_100:
5123         sensitivity = 100;
5124         break;
5125     case CAM_ISO_MODE_200:
5126         sensitivity = 200;
5127         break;
5128     case CAM_ISO_MODE_400:
5129         sensitivity = 400;
5130         break;
5131     case CAM_ISO_MODE_800:
5132         sensitivity = 800;
5133         break;
5134     case CAM_ISO_MODE_1600:
5135         sensitivity = 1600;
5136         break;
5137     default:
5138         sensitivity = -1;
5139         break;
5140     }
5141     return sensitivity;
5142 }
5143 
5144 /*===========================================================================
5145  * FUNCTION   : AddSetParmEntryToBatch
5146  *
5147  * DESCRIPTION: add set parameter entry into batch
5148  *
5149  * PARAMETERS :
5150  *   @p_table     : ptr to parameter buffer
5151  *   @paramType   : parameter type
5152  *   @paramLength : length of parameter value
5153  *   @paramValue  : ptr to parameter value
5154  *
5155  * RETURN     : int32_t type of status
5156  *              NO_ERROR  -- success
5157  *              non-zero failure code
5158  *==========================================================================*/
5159 int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
5160                                                           cam_intf_parm_type_t paramType,
5161                                                           uint32_t paramLength,
5162                                                           void *paramValue)
5163 {
5164     void* dst;
5165     if ((NULL == p_table) || (NULL == paramValue) ||
5166         (paramType >= CAM_INTF_PARM_MAX)) {
5167         ALOGE("%s: Invalid p_table: %p, paramValue: %p, param type: %d",
5168             __func__, p_table, paramValue, paramType);
5169         return BAD_VALUE;
5170     }
5171     /*************************************************************************
5172     *                   Copy contents into entry                             *
5173     *************************************************************************/
5174     if (paramLength > get_size_of(paramType)) {
5175         ALOGE("%s: input larger than max entry size, type=%d, length =%d",
5176                 __func__, paramType, paramLength);
5177         return BAD_VALUE;
5178     }
5179     dst = get_pointer_of(paramType, p_table);
5180     if(NULL != dst){
5181         memcpy(dst, paramValue, paramLength);
5182         p_table->is_valid[paramType] = 1;
5183     }
5184     return NO_ERROR;
5185 }
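/*
 * Illustrative usage sketch (not part of the HAL): callers copy a value into the
 * parameter table and mark it valid, e.g. the way setFrameParameters() below
 * sets the HAL version:
 *
 *   int32_t hal_version = CAM_HAL_V3;
 *   AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
 *           sizeof(hal_version), &hal_version);
 */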
5186 
5187 /*===========================================================================
5188  * FUNCTION   : lookupFwkName
5189  *
5190  * DESCRIPTION: In case the enum is not the same in the framework and backend,
5191  *              make sure the parameter is correctly propagated
5192  *
5193  * PARAMETERS  :
5194  *   @arr      : map between the two enums
5195  *   @len      : len of the map
5196  *   @hal_name : name of the hal_parm to map
5197  *
5198  * RETURN     : int32_t type of status
5199  *              fwk_name        -- success
5200  *              NAME_NOT_FOUND  -- no matching framework enum
5201  *==========================================================================*/
5202 int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
5203                                              int len, int hal_name)
5204 {
5205 
5206     for (int i = 0; i < len; i++) {
5207         if (arr[i].hal_name == hal_name)
5208             return arr[i].fwk_name;
5209     }
5210 
5211     /* Not finding a matching framework type is not necessarily
5212      * an error case. This happens when mm-camera supports more attributes
5213      * than the framework does */
5214     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
5215     return NAME_NOT_FOUND;
5216 }
5217 
5218 /*===========================================================================
5219  * FUNCTION   : lookupHalName
5220  *
5221  * DESCRIPTION: In case the enum is not the same in the framework and backend,
5222  *              make sure the parameter is correctly propagated
5223  *
5224  * PARAMETERS  :
5225  *   @arr      : map between the two enums
5226  *   @len      : len of the map
5227  *   @fwk_name : framework enum value to map to its HAL counterpart
5228  *
5229  * RETURN     : int8_t type of status
5230  *              hal_name        -- success
5231  *              NAME_NOT_FOUND  -- no matching HAL enum
5232  *==========================================================================*/
5233 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
5234                                              int len, unsigned int fwk_name)
5235 {
5236     for (int i = 0; i < len; i++) {
5237        if (arr[i].fwk_name == fwk_name)
5238            return arr[i].hal_name;
5239     }
5240     ALOGE("%s: Cannot find matching hal type", __func__);
5241     return NAME_NOT_FOUND;
5242 }
5243 
5244 /*===========================================================================
5245  * FUNCTION   : lookupProp
5246  *
5247  * DESCRIPTION: lookup a value by its name
5248  *
5249  * PARAMETERS :
5250  *   @attr    : map contains <name, value>
5251  *   @len     : size of the map
5252  *   @name    : name to be looked up
5253  *
5254  * RETURN     : Value if found
5255  *              CAM_CDS_MODE_MAX if not found
5256  *==========================================================================*/
5257 cam_cds_mode_type_t QCamera3HardwareInterface::lookupProp(const QCameraPropMap arr[],
5258         int len, const char *name)
5259 {
5260     if (name) {
5261         for (int i = 0; i < len; i++) {
5262             if (!strcmp(arr[i].desc, name)) {
5263                 return arr[i].val;
5264             }
5265         }
5266     }
5267     return CAM_CDS_MODE_MAX;
5268 }
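/*
 * Illustrative usage sketch (not part of the HAL): lookupProp() resolves a
 * property string against a <name, value> map, e.g. the CDS default further
 * below maps the persist.camera.CDS property through CDS_MAP:
 *
 *   cam_cds_mode_type_t cds =
 *           lookupProp(CDS_MAP, sizeof(CDS_MAP)/sizeof(QCameraPropMap), "Auto");
 *   // cds == CAM_CDS_MODE_AUTO; an unrecognized string returns CAM_CDS_MODE_MAX
 */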
5269 
5270 /*===========================================================================
5271  * FUNCTION   : getCamInfo
5272  *
5273  * DESCRIPTION: query camera capabilities
5274  *
5275  * PARAMETERS :
5276  *   @cameraId  : camera Id
5277  *   @info      : camera info struct to be filled in with camera capabilities
5278  *
5279  * RETURN     : int32_t type of status
5280  *              NO_ERROR  -- success
5281  *              non-zero failure code
5282  *==========================================================================*/
5283 int QCamera3HardwareInterface::getCamInfo(int cameraId,
5284                                     struct camera_info *info)
5285 {
5286     ATRACE_CALL();
5287     int rc = 0;
5288 
5289     if (NULL == gCamCapability[cameraId]) {
5290         rc = initCapabilities(cameraId);
5291         if (rc < 0) {
5292             //pthread_mutex_unlock(&g_camlock);
5293             return rc;
5294         }
5295     }
5296 
5297     if (NULL == gStaticMetadata[cameraId]) {
5298         rc = initStaticMetadata(cameraId);
5299         if (rc < 0) {
5300             return rc;
5301         }
5302     }
5303 
5304     switch(gCamCapability[cameraId]->position) {
5305     case CAM_POSITION_BACK:
5306         info->facing = CAMERA_FACING_BACK;
5307         break;
5308 
5309     case CAM_POSITION_FRONT:
5310         info->facing = CAMERA_FACING_FRONT;
5311         break;
5312 
5313     default:
5314         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
5315         rc = -1;
5316         break;
5317     }
5318 
5319 
5320     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
5321     info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
5322     info->static_camera_characteristics = gStaticMetadata[cameraId];
5323 
5324     return rc;
5325 }
5326 
5327 /*===========================================================================
5328  * FUNCTION   : translateCapabilityToMetadata
5329  *
5330  * DESCRIPTION: translate the capability into camera_metadata_t
5331  *
5332  * PARAMETERS : type of the request
5333  *
5334  *
5335  * RETURN     : success: camera_metadata_t*
5336  *              failure: NULL
5337  *
5338  *==========================================================================*/
5339 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
5340 {
5341     pthread_mutex_lock(&mMutex);
5342 
5343     if (mDefaultMetadata[type] != NULL) {
5344         pthread_mutex_unlock(&mMutex);
5345         return mDefaultMetadata[type];
5346     }
5347     //first time we are handling this request
5348     //fill up the metadata structure using the wrapper class
5349     CameraMetadata settings;
5350     //translate from cam_capability_t to camera_metadata_tag_t
5351     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
5352     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
5353     int32_t defaultRequestID = 0;
5354     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
5355 
5356     /* OIS disable */
5357     char ois_prop[PROPERTY_VALUE_MAX];
5358     memset(ois_prop, 0, sizeof(ois_prop));
5359     property_get("persist.camera.ois.disable", ois_prop, "0");
5360     uint8_t ois_disable = atoi(ois_prop);
5361 
5362     /* EIS enable */
5363     char eis_prop[PROPERTY_VALUE_MAX];
5364     memset(eis_prop, 0, sizeof(eis_prop));
5365     property_get("camera.eis.enable", eis_prop, "0");
5366     mEisEnable = atoi(eis_prop);
5367 
5368     /* Force video to use OIS */
5369     char videoOisProp[PROPERTY_VALUE_MAX];
5370     memset(videoOisProp, 0, sizeof(videoOisProp));
5371     property_get("persist.camera.ois.video", videoOisProp, "1");
5372     uint8_t forceVideoOis = atoi(videoOisProp);
5373 
5374     uint8_t controlIntent = 0;
5375     uint8_t focusMode;
5376     uint8_t vsMode;
5377     uint8_t optStabMode;
5378     uint8_t cacMode;
5379     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5380     switch (type) {
5381       case CAMERA3_TEMPLATE_PREVIEW:
5382         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
5383         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5384         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5385         break;
5386       case CAMERA3_TEMPLATE_STILL_CAPTURE:
5387         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
5388         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5389         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5390         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
5391         settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
5392         break;
5393       case CAMERA3_TEMPLATE_VIDEO_RECORD:
5394         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
5395         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
5396         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5397         if (forceVideoOis)
5398             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5399         break;
5400       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
5401         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
5402         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
5403         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5404         if (forceVideoOis)
5405             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5406         break;
5407       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
5408         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
5409         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5410         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5411         break;
5412       case CAMERA3_TEMPLATE_MANUAL:
5413         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
5414         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
5415         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5416         break;
5417       default:
5418         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
5419         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5420         break;
5421     }
5422     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
5423     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
5424     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
5425         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
5426     }
5427     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
5428 
5429     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
5430             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
5431         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5432     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
5433             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
5434             || ois_disable)
5435         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5436     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
5437 
5438     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
5439             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
5440 
5441     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
5442     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
5443 
5444     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
5445     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
5446 
5447     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
5448     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
5449 
5450     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
5451     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
5452 
5453     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
5454     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
5455 
5456     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
5457     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
5458 
5459     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
5460     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
5461 
5462     /*flash*/
5463     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
5464     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
5465 
5466     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
5467     settings.update(ANDROID_FLASH_FIRING_POWER,
5468             &flashFiringLevel, 1);
5469 
5470     /* lens */
5471     float default_aperture = gCamCapability[mCameraId]->apertures[0];
5472     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
5473 
5474     if (gCamCapability[mCameraId]->filter_densities_count) {
5475         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
5476         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
5477                         gCamCapability[mCameraId]->filter_densities_count);
5478     }
5479 
5480     float default_focal_length = gCamCapability[mCameraId]->focal_length;
5481     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
5482 
5483     float default_focus_distance = 0;
5484     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
5485 
5486     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
5487     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
5488 
5489     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5490     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5491 
5492     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
5493     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
5494 
5495     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
5496     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
5497 
5498     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
5499     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
5500 
5501     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
5502     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
5503 
5504     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5505     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5506 
5507     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
5508     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
5509 
5510     /* Exposure time (default to the minimum supported exposure time) */
5511     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
5512     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
5513 
5514     /* frame duration */
5515     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
5516     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
5517 
5518     /* sensitivity */
5519     static const int32_t default_sensitivity = 100;
5520     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
5521 
5522     /*edge mode*/
5523     static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
5524     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
5525 
5526     /*noise reduction mode*/
5527     static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5528     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
5529 
5530     /*color correction mode*/
5531     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
5532     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
5533 
5534     /*tonemap mode*/
5535     static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5536     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
5537 
5538     uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
5539     settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
5540 
5541     int32_t scaler_crop_region[4];
5542     scaler_crop_region[0] = 0;
5543     scaler_crop_region[1] = 0;
5544     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
5545     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
5546     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
5547 
5548     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
5549     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
5550 
5551     /*focus distance*/
5552     float focus_distance = 0.0;
5553     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
5554 
5555     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
5556     float max_range = 0.0;
5557     float max_fixed_fps = 0.0;
5558     int32_t fps_range[2] = {0, 0};
5559     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
5560             i++) {
5561         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
5562             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5563         if (type == CAMERA3_TEMPLATE_PREVIEW ||
5564                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
5565                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
5566             if (range > max_range) {
5567                 fps_range[0] =
5568                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5569                 fps_range[1] =
5570                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5571                 max_range = range;
5572             }
5573         } else {
5574             if (range < 0.01 && max_fixed_fps <
5575                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
5576                 fps_range[0] =
5577                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5578                 fps_range[1] =
5579                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5580                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5581             }
5582         }
5583     }
5584     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
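    /*
     * Illustrative sketch (not part of the HAL): with hypothetical capability
     * ranges {15,30}, {30,30} and {7.5,30}, a preview/still/ZSL template picks
     * the widest range, {7.5,30} (stored as {7,30} after the int32_t cast),
     * while a video template picks the highest fixed range, {30,30}.
     */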
5585 
5586     /*precapture trigger*/
5587     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
5588     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
5589 
5590     /*af trigger*/
5591     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
5592     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
5593 
5594     /* ae & af regions */
5595     int32_t active_region[] = {
5596             gCamCapability[mCameraId]->active_array_size.left,
5597             gCamCapability[mCameraId]->active_array_size.top,
5598             gCamCapability[mCameraId]->active_array_size.left +
5599                     gCamCapability[mCameraId]->active_array_size.width,
5600             gCamCapability[mCameraId]->active_array_size.top +
5601                     gCamCapability[mCameraId]->active_array_size.height,
5602             0};
5603     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
5604     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
5605 
5606     /* black level lock */
5607     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
5608     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
5609 
5610     /* face detect mode */
5611     uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
5612     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
5613 
5614     /* lens shading map mode */
5615     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
5616     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type &&
5617         gCamCapability[mCameraId]->supported_raw_dim_cnt > 0) {
5618         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
5619     }
5620     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
5621 
5622     //special defaults for manual template
5623     if (type == CAMERA3_TEMPLATE_MANUAL) {
5624         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
5625         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
5626 
5627         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
5628         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
5629 
5630         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
5631         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
5632 
5633         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
5634         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
5635 
5636         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
5637         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
5638 
5639         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
5640         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
5641     }
5642 
5643     /* CDS default */
5644     char prop[PROPERTY_VALUE_MAX];
5645     memset(prop, 0, sizeof(prop));
5646     property_get("persist.camera.CDS", prop, "Auto");
5647     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
5648     cds_mode = lookupProp(CDS_MAP, sizeof(CDS_MAP)/sizeof(QCameraPropMap), prop);
5649     if (CAM_CDS_MODE_MAX == cds_mode) {
5650         cds_mode = CAM_CDS_MODE_AUTO;
5651     }
5652     int32_t mode = cds_mode;
5653     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
5654 
5655     mDefaultMetadata[type] = settings.release();
5656 
5657     pthread_mutex_unlock(&mMutex);
5658     return mDefaultMetadata[type];
5659 }
5660 
5661 /*===========================================================================
5662  * FUNCTION   : setFrameParameters
5663  *
5664  * DESCRIPTION: set parameters per frame as requested in the metadata from
5665  *              framework
5666  *
5667  * PARAMETERS :
5668  *   @request   : request that needs to be serviced
5669  *   @streamID : Stream ID of all the requested streams
5670  *
5671  * RETURN     : success: NO_ERROR
5672  *              failure: non-zero error code
5673  *==========================================================================*/
5674 int QCamera3HardwareInterface::setFrameParameters(
5675                     camera3_capture_request_t *request,
5676                     cam_stream_ID_t streamID,
5677                     uint32_t snapshotStreamId)
5678 {
5679     /*translate from camera_metadata_t type to parm_type_t*/
5680     int rc = 0;
5681     int32_t hal_version = CAM_HAL_V3;
5682 
5683     memset(mParameters, 0, sizeof(parm_buffer_t));
5684     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
5685                 sizeof(hal_version), &hal_version);
5686     if (rc < 0) {
5687         ALOGE("%s: Failed to set hal version in the parameters", __func__);
5688         return BAD_VALUE;
5689     }
5690 
5691     /*we need to update the frame number in the parameters*/
5692     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
5693                                 sizeof(request->frame_number), &(request->frame_number));
5694     if (rc < 0) {
5695         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
5696         return BAD_VALUE;
5697     }
5698 
5699     /* Update stream id of all the requested buffers */
5700     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
5701                                 sizeof(cam_stream_ID_t), &streamID);
5702 
5703     if (rc < 0) {
5704         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
5705         return BAD_VALUE;
5706     }
5707 
5708     if(request->settings != NULL){
5709         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
5710     }
5711 
5712     return rc;
5713 }
5714 
5715 /*===========================================================================
5716  * FUNCTION   : setReprocParameters
5717  *
5718  * DESCRIPTION: Translate the framework's metadata into the HAL reprocess
5719  *              metadata structure, including any reprocess crop info.
5720  *
5721  * PARAMETERS :
5722  *   @request   : request that needs to be serviced
5723  *
5724  * RETURN     : success: NO_ERROR
5725  *              failure: non-zero error code
5726  *==========================================================================*/
5727 int32_t QCamera3HardwareInterface::setReprocParameters(
5728         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
5729         uint32_t snapshotStreamId)
5730 {
5731     /*translate from camera_metadata_t type to parm_type_t*/
5732     int rc = 0;
5733 
5734     if (NULL == request->settings){
5735         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
5736         return BAD_VALUE;
5737     }
5738 
5739     if (NULL == reprocParam) {
5740         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
5741         return BAD_VALUE;
5742     }
5743     memset(reprocParam, 0, sizeof(metadata_buffer_t));
5744 
5745     /*we need to update the frame number in the parameters*/
5746     rc = AddSetParmEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
5747                                 sizeof(request->frame_number), &(request->frame_number));
5748     if (rc < 0) {
5749         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
5750         return rc;
5751     }
5752 
5753     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
5754     if (rc < 0) {
5755         ALOGE("%s: Failed to translate reproc request", __func__);
5756         return rc;
5757     }
5758 
5759     CameraMetadata frame_settings;
5760     frame_settings = request->settings;
5761     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
5762             frame_settings.exists(QCAMERA3_CROP_REPROCESS) &&
5763             frame_settings.exists(QCAMERA3_CROP_STREAM_ID_REPROCESS)) {
5764         int32_t *crop_count =
5765                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
5766         int32_t *crop_data =
5767                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
5768         int32_t *crop_stream_ids =
5769                 frame_settings.find(QCAMERA3_CROP_STREAM_ID_REPROCESS).data.i32;
5770         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
5771             bool found = false;
5772             int32_t i;
5773             for (i = 0; i < *crop_count; i++) {
5774                 if (crop_stream_ids[i] == (int32_t) request->input_buffer->stream) {
5775                     found = true;
5776                     break;
5777                 }
5778             }
5779 
5780             if (found) {
5781                 cam_crop_data_t crop_meta;
5782                 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
5783                 crop_meta.num_of_streams = 1;
5784                 crop_meta.crop_info[0].crop.left   = crop_data[i*4];
5785                 crop_meta.crop_info[0].crop.top    = crop_data[i*4 + 1];
5786                 crop_meta.crop_info[0].crop.width  = crop_data[i*4 + 2];
5787                 crop_meta.crop_info[0].crop.height = crop_data[i*4 + 3];
5788                 rc = AddSetParmEntryToBatch(reprocParam,
5789                         CAM_INTF_META_CROP_DATA,
5790                         sizeof(cam_crop_data_t), &crop_meta);
5791                 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
5792                         __func__,
5793                         request->input_buffer->stream,
5794                         crop_meta.crop_info[0].crop.left,
5795                         crop_meta.crop_info[0].crop.top,
5796                         crop_meta.crop_info[0].crop.width,
5797                         crop_meta.crop_info[0].crop.height);
5798             } else {
5799                 ALOGE("%s: No matching reprocess input stream found!", __func__);
5800             }
5801         } else {
5802             ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
5803         }
5804     }
5805 
5806     return rc;
5807 }
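/*
 * Illustrative sketch (not part of the HAL): the reprocess crop vendor tags
 * handled above are parallel arrays keyed by stream id, with four int32_t
 * values {left, top, width, height} per stream, so the entry matching the
 * input stream is read as crop_data[i*4 + 0..3].
 */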
5808 
5809 /*===========================================================================
5810  * FUNCTION   : translateToHalMetadata
5811  *
5812  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
5813  *
5814  *
5815  * PARAMETERS :
5816  *   @request  : request sent from framework
5817  *
5818  *
5819  * RETURN     : success: NO_ERROR
5820  *              failure: non-zero error code
5821  *==========================================================================*/
5822 int QCamera3HardwareInterface::translateToHalMetadata
5823                                   (const camera3_capture_request_t *request,
5824                                    metadata_buffer_t *hal_metadata,
5825                                    uint32_t snapshotStreamId)
5826 {
5827     int rc = 0;
5828     CameraMetadata frame_settings;
5829     frame_settings = request->settings;
5830 
5831     /* Do not change the order of the following list unless you know what you are
5832      * doing.
5833      * The order is laid out in such a way that parameters in the front of the table
5834      * may be used to override the parameters later in the table. Examples are:
5835      * 1. META_MODE should precede AEC/AWB/AF MODE
5836      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
5837      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
5838      * 4. Any mode should precede its corresponding settings
5839      */
5840     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
5841         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
5842         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_MODE,
5843                 sizeof(metaMode), &metaMode);
5844         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
5845            camera_metadata_entry entry = frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
5846            if (0 < entry.count) {
5847                uint8_t fwk_sceneMode = entry.data.u8[0];
5848                uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
5849                                                  sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
5850                                                  fwk_sceneMode);
5851                rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
5852                     sizeof(sceneMode), &sceneMode);
5853            }
5854         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
5855            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
5856            rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
5857                 sizeof(sceneMode), &sceneMode);
5858         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
5859            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
5860            rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
5861                 sizeof(sceneMode), &sceneMode);
5862         }
5863     }
5864 
5865     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
5866         uint8_t fwk_aeMode =
5867             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
5868         uint8_t aeMode;
5869         int32_t redeye;
5870 
5871         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
5872             aeMode = CAM_AE_MODE_OFF;
5873         } else {
5874             aeMode = CAM_AE_MODE_ON;
5875         }
5876         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
5877             redeye = 1;
5878         } else {
5879             redeye = 0;
5880         }
5881 
5882         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
5883                                           sizeof(AE_FLASH_MODE_MAP),
5884                                           fwk_aeMode);
5885         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
5886                 sizeof(aeMode), &aeMode);
5887         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
5888                 sizeof(flashMode), &flashMode);
5889         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
5890                 sizeof(redeye), &redeye);
5891     }
5892 
5893     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
5894         uint8_t fwk_whiteLevel =
5895             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
5896         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
5897                 sizeof(WHITE_BALANCE_MODES_MAP),
5898                 fwk_whiteLevel);
5899         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
5900                 sizeof(whiteLevel), &whiteLevel);
5901     }
5902 
5903     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5904         uint8_t fwk_cacMode =
5905                 frame_settings.find(
5906                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5907         int8_t val = lookupHalName(COLOR_ABERRATION_MAP,
5908                 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
5909                 fwk_cacMode);
5910         if (NAME_NOT_FOUND != val) {
5911             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
5912             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_CAC,
5913                     sizeof(cacMode), &cacMode);
5914         } else {
5915             ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
5916         }
5917     }
5918 
5919     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
5920         uint8_t fwk_focusMode =
5921             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
5922         uint8_t focusMode;
5923         focusMode = lookupHalName(FOCUS_MODES_MAP,
5924                                    sizeof(FOCUS_MODES_MAP),
5925                                    fwk_focusMode);
5926         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
5927                 sizeof(focusMode), &focusMode);
5928     }
5929 
5930     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
5931         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
5932         rc = AddSetParmEntryToBatch(hal_metadata,
5933                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
5934                 sizeof(focalDistance), &focalDistance);
5935     }
5936 
5937     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
5938         uint8_t fwk_antibandingMode =
5939             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
5940         int32_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
5941                      sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
5942                      fwk_antibandingMode);
5943         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
5944                 sizeof(hal_antibandingMode), &hal_antibandingMode);
5945     }
5946 
5947     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
5948         int32_t expCompensation = frame_settings.find(
5949             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
5950         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
5951             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
5952         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
5953             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
5954         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
5955           sizeof(expCompensation), &expCompensation);
5956     }
5957 
5958     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
5959         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
5960         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
5961                 sizeof(aeLock), &aeLock);
5962     }
5963     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5964         cam_fps_range_t fps_range;
5965         fps_range.min_fps =
5966             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
5967         fps_range.max_fps =
5968             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
5969         fps_range.video_min_fps = fps_range.min_fps;
5970         fps_range.video_max_fps = fps_range.max_fps;
5971         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
5972                 sizeof(fps_range), &fps_range);
5973     }
5974 
5975     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
5976         uint8_t awbLock =
5977             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
5978         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
5979                 sizeof(awbLock), &awbLock);
5980     }
5981 
5982     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
5983         uint8_t fwk_effectMode =
5984             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
5985         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
5986                 sizeof(EFFECT_MODES_MAP),
5987                 fwk_effectMode);
5988         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
5989                 sizeof(effectMode), &effectMode);
5990     }
5991 
5992     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
5993         uint8_t colorCorrectMode =
5994             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
5995         rc =
5996             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
5997                     sizeof(colorCorrectMode), &colorCorrectMode);
5998     }
5999 
6000     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
6001         cam_color_correct_gains_t colorCorrectGains;
6002         for (int i = 0; i < 4; i++) {
6003             colorCorrectGains.gains[i] =
6004                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
6005         }
6006         rc =
6007             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
6008                     sizeof(colorCorrectGains), &colorCorrectGains);
6009     }
6010 
6011     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
6012         cam_color_correct_matrix_t colorCorrectTransform;
6013         cam_rational_type_t transform_elem;
6014         int num = 0;
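        // android.colorCorrection.transform is a row-major 3x3 matrix of rationals; copy it element by element.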
6015         for (int i = 0; i < 3; i++) {
6016            for (int j = 0; j < 3; j++) {
6017               transform_elem.numerator =
6018                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
6019               transform_elem.denominator =
6020                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
6021               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
6022               num++;
6023            }
6024         }
6025         rc =
6026             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
6027                     sizeof(colorCorrectTransform), &colorCorrectTransform);
6028     }
6029 
6030     cam_trigger_t aecTrigger;
6031     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
6032     aecTrigger.trigger_id = -1;
6033     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
6034         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
6035         aecTrigger.trigger =
6036             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
6037         aecTrigger.trigger_id =
6038             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
6039         rc = AddSetParmEntryToBatch(hal_metadata,
6040                 CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
6041                 sizeof(aecTrigger), &aecTrigger);
6042     }
6043     /*af_trigger must come with a trigger id*/
6044     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
6045         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
6046         cam_trigger_t af_trigger;
6047         af_trigger.trigger =
6048             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
6049         af_trigger.trigger_id =
6050             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
6051         rc = AddSetParmEntryToBatch(hal_metadata,
6052                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
6053     }
6054 
6055     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
6056         int32_t demosaic =
6057             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
6058         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
6059                 sizeof(demosaic), &demosaic);
6060     }
6061 
6062     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
6063         cam_edge_application_t edge_application;
6064         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
6065         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
6066             edge_application.sharpness = 0;
6067         } else {
6068             if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
6069                 uint8_t edgeStrength =
6070                     frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
6071                 edge_application.sharpness = (int32_t)edgeStrength;
6072             } else {
6073                 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
6074             }
6075         }
6076         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
6077                 sizeof(edge_application), &edge_application);
6078     }
6079 
6080     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
6081         int32_t respectFlashMode = 1;
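        // If AE is in one of the auto-flash modes (values above AE_MODE_ON), AE owns the flash and android.flash.mode is ignored.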
6082         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
6083             uint8_t fwk_aeMode =
6084                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
6085             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
6086                 respectFlashMode = 0;
6087                 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
6088                     __func__);
6089             }
6090         }
6091         if (respectFlashMode) {
6092             uint8_t flashMode =
6093                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
6094             flashMode = (uint8_t)lookupHalName(FLASH_MODES_MAP,
6095                                           sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
6096                                           flashMode);
6097             CDBG_HIGH("%s: flash mode after mapping %d", __func__, flashMode);
6098             // To check: CAM_INTF_META_FLASH_MODE usage
6099             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
6100                           sizeof(flashMode), &flashMode);
6101         }
6102     }
6103 
6104     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
6105         uint8_t flashPower =
6106             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
6107         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
6108                 sizeof(flashPower), &flashPower);
6109     }
6110 
6111     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
6112         int64_t flashFiringTime =
6113             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
6114         rc = AddSetParmEntryToBatch(hal_metadata,
6115                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
6116     }
6117 
6118     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
6119         uint8_t hotPixelMode =
6120             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
6121         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
6122                 sizeof(hotPixelMode), &hotPixelMode);
6123     }
6124 
6125     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
6126         float lensAperture =
6127             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
6128         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
6129                 sizeof(lensAperture), &lensAperture);
6130     }
6131 
6132     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
6133         float filterDensity =
6134             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
6135         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
6136                 sizeof(filterDensity), &filterDensity);
6137     }
6138 
6139     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
6140         float focalLength =
6141             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
6142         rc = AddSetParmEntryToBatch(hal_metadata,
6143                 CAM_INTF_META_LENS_FOCAL_LENGTH,
6144                 sizeof(focalLength), &focalLength);
6145     }
6146 
6147     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
6148         uint8_t optStabMode =
6149             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
6150         rc = AddSetParmEntryToBatch(hal_metadata,
6151                 CAM_INTF_META_LENS_OPT_STAB_MODE,
6152                 sizeof(optStabMode), &optStabMode);
6153     }
6154 
6155     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
6156         uint8_t noiseRedMode =
6157             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
6158         rc = AddSetParmEntryToBatch(hal_metadata,
6159                 CAM_INTF_META_NOISE_REDUCTION_MODE,
6160                 sizeof(noiseRedMode), &noiseRedMode);
6161     }
6162 
6163     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
6164         uint8_t noiseRedStrength =
6165             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
6166         rc = AddSetParmEntryToBatch(hal_metadata,
6167                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
6168                 sizeof(noiseRedStrength), &noiseRedStrength);
6169     }
6170 
6171     cam_crop_region_t scalerCropRegion;
6172     bool scalerCropSet = false;
6173     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
6174         scalerCropRegion.left =
6175             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
6176         scalerCropRegion.top =
6177             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
6178         scalerCropRegion.width =
6179             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
6180         scalerCropRegion.height =
6181             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
6182         rc = AddSetParmEntryToBatch(hal_metadata,
6183                 CAM_INTF_META_SCALER_CROP_REGION,
6184                 sizeof(scalerCropRegion), &scalerCropRegion);
6185         scalerCropSet = true;
6186     }
6187 
6188     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
6189         int64_t sensorExpTime =
6190             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
6191         CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
6192         rc = AddSetParmEntryToBatch(hal_metadata,
6193                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
6194                 sizeof(sensorExpTime), &sensorExpTime);
6195     }
6196 
6197     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
6198         int64_t sensorFrameDuration =
6199             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
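        // Clamp the requested duration between the largest per-stream minimum for this request and the sensor's maximum frame duration.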
6200         int64_t minFrameDuration = getMinFrameDuration(request);
6201         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
6202         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
6203             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
6204         CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
6205         rc = AddSetParmEntryToBatch(hal_metadata,
6206                 CAM_INTF_META_SENSOR_FRAME_DURATION,
6207                 sizeof(sensorFrameDuration), &sensorFrameDuration);
6208     }
6209 
6210     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
6211         int32_t sensorSensitivity =
6212             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
6213         if (sensorSensitivity <
6214                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
6215             sensorSensitivity =
6216                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
6217         if (sensorSensitivity >
6218                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
6219             sensorSensitivity =
6220                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
6221         CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
6222         rc = AddSetParmEntryToBatch(hal_metadata,
6223                 CAM_INTF_META_SENSOR_SENSITIVITY,
6224                 sizeof(sensorSensitivity), &sensorSensitivity);
6225     }
6226 
6227     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
6228         uint8_t shadingMode =
6229             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
6230         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
6231                 sizeof(shadingMode), &shadingMode);
6232     }
6233 
6234     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
6235         uint8_t shadingStrength =
6236             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
6237         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
6238                 sizeof(shadingStrength), &shadingStrength);
6239     }
6240 
6241     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
6242         uint8_t fwk_facedetectMode =
6243             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
6244         uint8_t facedetectMode =
6245             lookupHalName(FACEDETECT_MODES_MAP,
6246                 sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), fwk_facedetectMode);
6247         rc = AddSetParmEntryToBatch(hal_metadata,
6248                 CAM_INTF_META_STATS_FACEDETECT_MODE,
6249                 sizeof(facedetectMode), &facedetectMode);
6250     }
6251 
6252     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
6253         uint8_t histogramMode =
6254             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
6255         rc = AddSetParmEntryToBatch(hal_metadata,
6256                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
6257                 sizeof(histogramMode), &histogramMode);
6258     }
6259 
6260     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
6261         uint8_t sharpnessMapMode =
6262             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
6263         rc = AddSetParmEntryToBatch(hal_metadata,
6264                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
6265                 sizeof(sharpnessMapMode), &sharpnessMapMode);
6266     }
6267 
6268     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
6269         uint8_t tonemapMode =
6270             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
6271         rc = AddSetParmEntryToBatch(hal_metadata,
6272                 CAM_INTF_META_TONEMAP_MODE,
6273                 sizeof(tonemapMode), &tonemapMode);
6274     }
6275     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
6276     /*All tonemap channels will have the same number of points*/
6277     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
6278         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
6279         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
6280         cam_rgb_tonemap_curves tonemapCurves;
6281         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
6282         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6283             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
6284                     __func__, tonemapCurves.tonemap_points_cnt,
6285                     CAM_MAX_TONEMAP_CURVE_SIZE);
6286             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6287         }
6288 
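        /* Each framework curve entry is an (input, output) float pair, hence the point count
           is count/2 and the inner loops below copy two values per point. */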
6289         /* ch0 = G*/
6290         int point = 0;
6291         cam_tonemap_curve_t tonemapCurveGreen;
6292         for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
6293             for (int j = 0; j < 2; j++) {
6294                tonemapCurveGreen.tonemap_points[i][j] =
6295                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
6296                point++;
6297             }
6298         }
6299         tonemapCurves.curves[0] = tonemapCurveGreen;
6300 
6301         /* ch 1 = B */
6302         point = 0;
6303         cam_tonemap_curve_t tonemapCurveBlue;
6304         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
6305             for (int j = 0; j < 2; j++) {
6306                tonemapCurveBlue.tonemap_points[i][j] =
6307                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
6308                point++;
6309             }
6310         }
6311         tonemapCurves.curves[1] = tonemapCurveBlue;
6312 
6313         /* ch 2 = R */
6314         point = 0;
6315         cam_tonemap_curve_t tonemapCurveRed;
6316         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
6317             for (int j = 0; j < 2; j++) {
6318                tonemapCurveRed.tonemap_points[i][j] =
6319                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
6320                point++;
6321             }
6322         }
6323         tonemapCurves.curves[2] = tonemapCurveRed;
6324 
6325         rc = AddSetParmEntryToBatch(hal_metadata,
6326                 CAM_INTF_META_TONEMAP_CURVES,
6327                 sizeof(tonemapCurves), &tonemapCurves);
6328     }
6329 
6330     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
6331         uint8_t captureIntent =
6332             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
6333         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
6334                 sizeof(captureIntent), &captureIntent);
6335     }
6336 
6337     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
6338         uint8_t blackLevelLock =
6339             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
6340         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
6341                 sizeof(blackLevelLock), &blackLevelLock);
6342     }
6343 
6344     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
6345         uint8_t lensShadingMapMode =
6346             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
6347         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
6348                 sizeof(lensShadingMapMode), &lensShadingMapMode);
6349     }
6350 
6351     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
6352         cam_area_t roi;
6353         bool reset = true;
6354         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
6355         if (scalerCropSet) {
6356             reset = resetIfNeededROI(&roi, &scalerCropRegion);
6357         }
6358         if (reset) {
6359             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
6360                     sizeof(roi), &roi);
6361         }
6362     }
6363 
6364     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
6365         cam_area_t roi;
6366         bool reset = true;
6367         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
6368         if (scalerCropSet) {
6369             reset = resetIfNeededROI(&roi, &scalerCropRegion);
6370         }
6371         if (reset) {
6372             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
6373                     sizeof(roi), &roi);
6374         }
6375     }
6376 
6377     // CDS
6378     if (frame_settings.exists(QCAMERA3_CDS_MODE)) {
6379         int32_t* cds =
6380             frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
6381         if ((CAM_CDS_MODE_MAX <= (*cds)) || (0 > (*cds))) {
6382             ALOGE("%s: Invalid CDS mode %d!", __func__, *cds);
6383         } else {
6384             cam_cds_mode_type_t mode = (cam_cds_mode_type_t) *cds;
6385             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_CDS_MODE,
6386                 sizeof(mode), &mode);
6387         }
6388     }
6389 
6390     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
6391         cam_test_pattern_data_t testPatternData;
6392         uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
6393         uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
6394                sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]), fwk_testPatternMode);
6395 
6396         memset(&testPatternData, 0, sizeof(testPatternData));
6397         testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
6398         if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
6399                 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
6400             int32_t* fwk_testPatternData = frame_settings.find(
6401                     ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
6402             testPatternData.r = fwk_testPatternData[0];
6403             testPatternData.b = fwk_testPatternData[3];
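            // The framework supplies four Bayer channel values; r and b map directly, while the
            // two green samples are assigned according to the sensor's color filter arrangement.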
6404             switch (gCamCapability[mCameraId]->color_arrangement) {
6405             case CAM_FILTER_ARRANGEMENT_RGGB:
6406             case CAM_FILTER_ARRANGEMENT_GRBG:
6407                 testPatternData.gr = fwk_testPatternData[1];
6408                 testPatternData.gb = fwk_testPatternData[2];
6409                 break;
6410             case CAM_FILTER_ARRANGEMENT_GBRG:
6411             case CAM_FILTER_ARRANGEMENT_BGGR:
6412                 testPatternData.gr = fwk_testPatternData[2];
6413                 testPatternData.gb = fwk_testPatternData[1];
6414                 break;
6415             default:
6416                 ALOGE("%s: color arrangement %d is not supported", __func__,
6417                     gCamCapability[mCameraId]->color_arrangement);
6418                 break;
6419             }
6420         }
6421         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
6422             sizeof(testPatternData), &testPatternData);
6423     }
6424 
6425     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
6426         double *gps_coords =
6427             frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
6428         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
6429     }
6430 
6431     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
6432         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
6433         const char *gps_methods_src = (const char *)
6434                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
6435         uint32_t count = frame_settings.find(
6436                 ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
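        // Zero the buffer and copy at most size-1 bytes so the GPS processing method string is always NUL-terminated.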
6437         memset(gps_methods, '\0', sizeof(gps_methods));
6438         strncpy(gps_methods, gps_methods_src, sizeof(gps_methods)-1);
6439         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
6440     }
6441 
6442     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
6443         int64_t gps_timestamp =
6444             frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
6445         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
6446     }
6447 
6448     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6449         int32_t orientation =
6450             frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6451         cam_rotation_info_t rotation_info;
6452         if (orientation == 0) {
6453            rotation_info.rotation = ROTATE_0;
6454         } else if (orientation == 90) {
6455            rotation_info.rotation = ROTATE_90;
6456         } else if (orientation == 180) {
6457            rotation_info.rotation = ROTATE_180;
6458         } else if (orientation == 270) {
6459            rotation_info.rotation = ROTATE_270;
6460         } else {
            // Avoid passing an uninitialized rotation for unexpected orientation values.
            rotation_info.rotation = ROTATE_0;
        }
6461         rotation_info.streamId = snapshotStreamId;
6462         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
6463         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ROTATION, sizeof(rotation_info), &rotation_info);
6464     }
6465 
6466     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
6467         int8_t quality =
6468             frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
6469         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
6470     }
6471 
6472     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
6473         int8_t thumb_quality =
6474             frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
6475         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
6476     }
6477 
6478     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6479         cam_dimension_t dim;
6480         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6481         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6482         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
6483     }
6484 
6485     // Internal metadata
6486     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
6487         int32_t* privatedata =
6488                 frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.i32;
6489         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
6490                 sizeof(int32_t) * MAX_METADATA_PRIVATE_PAYLOAD_SIZE, privatedata);
6491     }
6492 
6493     // EV step
6494     rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
6495             sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
6496 
6497     return rc;
6498 }
6499 
6500 /*===========================================================================
6501  * FUNCTION   : captureResultCb
6502  *
6503  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
6504  *
6505  * PARAMETERS :
6506  *   @metadata : metadata buffer from mm-camera-interface. NULL if stream buffer.
6507  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
6508  *   @frame_number : frame number of the request
 *   @userdata : opaque pointer to the QCamera3HardwareInterface instance
6509  *
6510  * RETURN     : NONE
6511  *==========================================================================*/
6512 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
6513                 camera3_stream_buffer_t *buffer,
6514                 uint32_t frame_number, void *userdata)
6515 {
6516     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
6517     if (hw == NULL) {
6518         ALOGE("%s: Invalid hw %p", __func__, hw);
6519         return;
6520     }
6521 
6522     hw->captureResultCb(metadata, buffer, frame_number);
6523     return;
6524 }
6525 
6526 
6527 /*===========================================================================
6528  * FUNCTION   : initialize
6529  *
6530  * DESCRIPTION: Pass framework callback pointers to HAL
6531  *
6532  * PARAMETERS :
6533  *
6534  *
6535  * RETURN     : Success : 0
6536  *              Failure: -ENODEV
6537  *==========================================================================*/
6538 
6539 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
6540                                   const camera3_callback_ops_t *callback_ops)
6541 {
6542     CDBG("%s: E", __func__);
6543     QCamera3HardwareInterface *hw =
6544         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6545     if (!hw) {
6546         ALOGE("%s: NULL camera device", __func__);
6547         return -ENODEV;
6548     }
6549 
6550     int rc = hw->initialize(callback_ops);
6551     CDBG("%s: X", __func__);
6552     return rc;
6553 }
6554 
6555 /*===========================================================================
6556  * FUNCTION   : configure_streams
6557  *
6558  * DESCRIPTION: Set up the streams requested by the framework for this camera device
6559  *
6560  * PARAMETERS :
6561  *
6562  *
6563  * RETURN     : Success: 0
6564  *              Failure: -EINVAL (if stream configuration is invalid)
6565  *                       -ENODEV (fatal error)
6566  *==========================================================================*/
6567 
6568 int QCamera3HardwareInterface::configure_streams(
6569         const struct camera3_device *device,
6570         camera3_stream_configuration_t *stream_list)
6571 {
6572     CDBG("%s: E", __func__);
6573     QCamera3HardwareInterface *hw =
6574         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6575     if (!hw) {
6576         ALOGE("%s: NULL camera device", __func__);
6577         return -ENODEV;
6578     }
6579     int rc = hw->configureStreams(stream_list);
6580     CDBG("%s: X", __func__);
6581     return rc;
6582 }
6583 
6584 /*===========================================================================
6585  * FUNCTION   : construct_default_request_settings
6586  *
6587  * DESCRIPTION: Configure a settings buffer to meet the required use case
6588  *
6589  * PARAMETERS :
6590  *
6591  *
6592  * RETURN     : Success: Return valid metadata
6593  *              Failure: Return NULL
6594  *==========================================================================*/
6595 const camera_metadata_t* QCamera3HardwareInterface::
6596     construct_default_request_settings(const struct camera3_device *device,
6597                                         int type)
6598 {
6599 
6600     CDBG("%s: E", __func__);
6601     camera_metadata_t* fwk_metadata = NULL;
6602     QCamera3HardwareInterface *hw =
6603         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6604     if (!hw) {
6605         ALOGE("%s: NULL camera device", __func__);
6606         return NULL;
6607     }
6608 
6609     fwk_metadata = hw->translateCapabilityToMetadata(type);
6610 
6611     CDBG("%s: X", __func__);
6612     return fwk_metadata;
6613 }
6614 
6615 /*===========================================================================
6616  * FUNCTION   : process_capture_request
6617  *
6618  * DESCRIPTION: Submit a new capture request from the framework to the HAL
6619  *
6620  * PARAMETERS :
6621  *
6622  *
6623  * RETURN     :
6624  *==========================================================================*/
6625 int QCamera3HardwareInterface::process_capture_request(
6626                     const struct camera3_device *device,
6627                     camera3_capture_request_t *request)
6628 {
6629     CDBG("%s: E", __func__);
6630     QCamera3HardwareInterface *hw =
6631         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6632     if (!hw) {
6633         ALOGE("%s: NULL camera device", __func__);
6634         return -EINVAL;
6635     }
6636 
6637     int rc = hw->processCaptureRequest(request);
6638     CDBG("%s: X", __func__);
6639     return rc;
6640 }
6641 
6642 /*===========================================================================
6643  * FUNCTION   : dump
6644  *
6645  * DESCRIPTION: Dump HAL debug state to the given file descriptor
6646  *
6647  * PARAMETERS :
6648  *
6649  *
6650  * RETURN     :
6651  *==========================================================================*/
6652 
6653 void QCamera3HardwareInterface::dump(
6654                 const struct camera3_device *device, int fd)
6655 {
6656     /* Log level property is read when "adb shell dumpsys media.camera" is
6657        called so that the log level can be controlled without restarting
6658        the media server */
6659     getLogLevel();
6660 
6661     CDBG("%s: E", __func__);
6662     QCamera3HardwareInterface *hw =
6663         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6664     if (!hw) {
6665         ALOGE("%s: NULL camera device", __func__);
6666         return;
6667     }
6668 
6669     hw->dump(fd);
6670     CDBG("%s: X", __func__);
6671     return;
6672 }
6673 
6674 /*===========================================================================
6675  * FUNCTION   : flush
6676  *
6677  * DESCRIPTION: Flush all in-flight captures for this camera device
6678  *
6679  * PARAMETERS :
6680  *
6681  *
6682  * RETURN     :
6683  *==========================================================================*/
6684 
6685 int QCamera3HardwareInterface::flush(
6686                 const struct camera3_device *device)
6687 {
6688     int rc;
6689     CDBG("%s: E", __func__);
6690     QCamera3HardwareInterface *hw =
6691         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6692     if (!hw) {
6693         ALOGE("%s: NULL camera device", __func__);
6694         return -EINVAL;
6695     }
6696 
6697     rc = hw->flush();
6698     CDBG("%s: X", __func__);
6699     return rc;
6700 }
6701 
6702 /*===========================================================================
6703  * FUNCTION   : close_camera_device
6704  *
6705  * DESCRIPTION: Close the camera device and release the HAL instance
6706  *
6707  * PARAMETERS :
6708  *
6709  *
6710  * RETURN     :
6711  *==========================================================================*/
6712 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
6713 {
6714     CDBG("%s: E", __func__);
6715     int ret = NO_ERROR;
6716     QCamera3HardwareInterface *hw =
6717         reinterpret_cast<QCamera3HardwareInterface *>(
6718             reinterpret_cast<camera3_device_t *>(device)->priv);
6719     if (!hw) {
6720         ALOGE("NULL camera device");
6721         return BAD_VALUE;
6722     }
6723     delete hw;
6724 
6725     CDBG("%s: X", __func__);
6726     return ret;
6727 }
6728 
6729 /*===========================================================================
6730  * FUNCTION   : getWaveletDenoiseProcessPlate
6731  *
6732  * DESCRIPTION: query wavelet denoise process plate
6733  *
6734  * PARAMETERS : None
6735  *
6736  * RETURN     : WNR process plate value
6737  *==========================================================================*/
6738 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
6739 {
6740     char prop[PROPERTY_VALUE_MAX];
6741     memset(prop, 0, sizeof(prop));
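    // The plate can be selected at runtime, e.g. "adb shell setprop persist.denoise.process.plates 2"
    // selects the streamlined YCbCr plate per the mapping below.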
6742     property_get("persist.denoise.process.plates", prop, "0");
6743     int processPlate = atoi(prop);
6744     switch(processPlate) {
6745     case 0:
6746         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
6747     case 1:
6748         return CAM_WAVELET_DENOISE_CBCR_ONLY;
6749     case 2:
6750         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
6751     case 3:
6752         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
6753     default:
6754         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
6755     }
6756 }
6757 
6758 /*===========================================================================
6759  * FUNCTION   : needRotationReprocess
6760  *
6761  * DESCRIPTION: if rotation needs to be done by reprocess in pp
6762  *
6763  * PARAMETERS : none
6764  *
6765  * RETURN     : true: needed
6766  *              false: no need
6767  *==========================================================================*/
6768 bool QCamera3HardwareInterface::needRotationReprocess()
6769 {
6770     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
6771         // pp module has the capability to process rotation
6772         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
6773         return true;
6774     }
6775 
6776     return false;
6777 }
6778 
6779 /*===========================================================================
6780  * FUNCTION   : needReprocess
6781  *
6782  * DESCRIPTION: if reprocess is needed
6783  *
6784  * PARAMETERS : none
6785  *
6786  * RETURN     : true: needed
6787  *              false: no need
6788  *==========================================================================*/
6789 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
6790 {
6791     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
6792         // TODO: add for ZSL HDR later
6793         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
6794         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
6795             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
6796             return true;
6797         } else {
6798             CDBG_HIGH("%s: already post processed frame", __func__);
6799             return false;
6800         }
6801     }
6802     return needRotationReprocess();
6803 }
6804 
6805 /*===========================================================================
6806  * FUNCTION   : needJpegRotation
6807  *
6808  * DESCRIPTION: if rotation from jpeg is needed
6809  *
6810  * PARAMETERS : none
6811  *
6812  * RETURN     : true: needed
6813  *              false: no need
6814  *==========================================================================*/
6815 bool QCamera3HardwareInterface::needJpegRotation()
6816 {
6817    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
6818     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
6819        CDBG("%s: Need Jpeg to do the rotation", __func__);
6820        return true;
6821     }
6822     return false;
6823 }
6824 
6825 /*===========================================================================
6826  * FUNCTION   : addOfflineReprocChannel
6827  *
6828  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
6829  *              coming from input channel
6830  *
6831  * PARAMETERS :
6832  *   @config      : reprocess configuration
6833  *   @picChHandle : handle to the source picture channel
6834  *   @metadata    : metadata buffer for the reprocess frame
6835  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
6836  *==========================================================================*/
6837 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
6838         const reprocess_config_t &config, QCamera3PicChannel *picChHandle,
6839         metadata_buffer_t *metadata)
6840 {
6841     int32_t rc = NO_ERROR;
6842     QCamera3ReprocessChannel *pChannel = NULL;
6843 
6844     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
6845             mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, picChHandle);
6846     if (NULL == pChannel) {
6847         ALOGE("%s: no mem for reprocess channel", __func__);
6848         return NULL;
6849     }
6850 
6851     rc = pChannel->initialize(IS_TYPE_NONE, mCaptureIntent);
6852     if (rc != NO_ERROR) {
6853         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
6854         delete pChannel;
6855         return NULL;
6856     }
6857 
6858     // pp feature config
6859     cam_pp_feature_config_t pp_config;
6860     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
6861 
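    // Request the generic post-processing feature superset; addReprocStreamsFromSource() below
    // sets up the actual reprocess streams based on the source configuration.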
6862     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET;
6863 
6864     rc = pChannel->addReprocStreamsFromSource(pp_config,
6865             config,
6866             IS_TYPE_NONE,
6867             mMetadataChannel);
6868 
6869     if (rc != NO_ERROR) {
6870         delete pChannel;
6871         return NULL;
6872     }
6873     return pChannel;
6874 }
6875 
6876 
6877 bool QCamera3HardwareInterface::isCACEnabled() {
6878     char prop[PROPERTY_VALUE_MAX];
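    // Chromatic aberration correction is an opt-in feature; any non-zero value of persist.camera.feature.cac enables it.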
6879     memset(prop, 0, sizeof(prop));
6880     property_get("persist.camera.feature.cac", prop, "0");
6881     int enableCAC = atoi(prop);
6882     return enableCAC;
6883 }
6884 /*===========================================================================
6885 * FUNCTION   : getLogLevel
6886 *
6887 * DESCRIPTION: Reads the log level property into a variable
6888 *
6889 * PARAMETERS :
6890 *   None
6891 *
6892 * RETURN     :
6893 *   None
6894 *==========================================================================*/
6895 void QCamera3HardwareInterface::getLogLevel()
6896 {
6897     char prop[PROPERTY_VALUE_MAX];
6898 
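    // e.g. "adb shell setprop persist.camera.logs 2"; the value is re-read on every dump() call (see above), so no restart is needed.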
6899     property_get("persist.camera.logs", prop, "0");
6900     gCamHal3LogLevel = atoi(prop);
6901 
6902     return;
6903 }
6904 
6905 }; //end namespace qcamera
6906