• Home
  • History
  • Annotate
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2013, 2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 
32 #include <cutils/properties.h>
33 #include <hardware/camera3.h>
34 #include <camera/CameraMetadata.h>
35 #include <stdlib.h>
36 #include <utils/Log.h>
37 #include <utils/Errors.h>
38 #include <ui/Fence.h>
39 #include <gralloc_priv.h>
40 #include "QCamera3HWI.h"
41 #include "QCamera3Mem.h"
42 #include "QCamera3Channel.h"
43 #include "QCamera3PostProc.h"
44 
using namespace android;

namespace qcamera {

// Shorthand for fetching the CPU-mapped pointer of buffer INDEX from a
// QCamera3Mem-style object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability and static-metadata tables. These are indexed by
// camera ID and are assumed to be populated elsewhere in the HAL before a
// QCamera3HardwareInterface is constructed -- TODO confirm init order.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session bookkeeping across instances: openCamera() uses this
// lock plus mCameraSessionActive to reject a second simultaneous session.
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
56 
// Translation table: ANDROID_CONTROL_EFFECT_MODE_* (framework) ->
// CAM_EFFECT_MODE_* (mm-camera backend).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
68 
// Translation table: ANDROID_CONTROL_AWB_MODE_* (framework) ->
// CAM_WB_MODE_* (mm-camera backend).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
80 
// Translation table: ANDROID_CONTROL_SCENE_MODE_* (framework) ->
// CAM_SCENE_MODE_* (mm-camera backend). Note STEADYPHOTO intentionally maps
// to the backend's ANTISHAKE mode (closest equivalent).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
98 
// Translation table: ANDROID_CONTROL_AF_MODE_* (framework) ->
// CAM_FOCUS_MODE_* (mm-camera backend). AF_MODE_OFF is mapped to FIXED --
// presumably the backend has no literal "off" and fixed focus is the closest
// behavior; TODO confirm against backend enum semantics.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
107 
// Translation table: ANDROID_CONTROL_AE_ANTIBANDING_MODE_* (framework) ->
// CAM_ANTIBANDING_MODE_* (mm-camera backend).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
114 
// Translation table: ANDROID_CONTROL_AE_MODE_* (framework) -> the backend
// flash mode the AE mode implies. Both AE OFF and plain AE ON map to flash
// OFF (no flash requested); the REDEYE variant maps to AUTO since there is
// no dedicated backend red-eye flash mode in this table.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
122 
// Translation table: ANDROID_FLASH_MODE_* (framework manual flash control) ->
// CAM_FLASH_MODE_* (mm-camera backend).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
128 
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the trailing
// (0, 0) entry advertises that thumbnail generation can be disabled, per the
// android.jpeg.availableThumbnailSizes convention.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
131 
// camera3_device_ops vtable handed to the framework; each entry forwards to
// the static trampoline of the same name, which recovers the
// QCamera3HardwareInterface instance from the device's priv pointer.
// (Uses the GNU "label:" designated-initializer syntax.)
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};
141 
142 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Wires up the
 *              camera3_device_t vtable/priv pointer, zeroes per-session
 *              state, and initializes the request mutex/condvar.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      m_pPowerModule(NULL),
      mPrecaptureId(0)
{
    // Publish this instance through the HAL device struct; the static ops
    // trampolines recover it via mCameraDevice.priv.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check -- assumes capabilities were loaded before construction; confirm
    // against the module's camera_device_open path.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request tracking: condvar signalled as pending requests complete.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL is optional; failure to find it is logged but non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
194 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down all
 *              stream channels (stop first, then delete), releases JPEG
 *              settings and parameter state, closes the backend camera, and
 *              frees cached default-request templates.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    // Pass 1: stop every channel while all of them still exist, so no
    // channel is deleted while a sibling is still streaming.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
           channel->stop();
    }
    // Pass 2: delete the channels and free the stream_info records
    // (allocated with malloc in configureStreams).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
            delete channel;
        free (*it);
    }

    // mPictureChannel was owned via a stream_info entry and is already
    // deleted above; just clear the alias.
    mPictureChannel = NULL;

    if (mJpegSettings != NULL) {
        free(mJpegSettings);
        mJpegSettings = NULL;
    }

    /* Clean up all channels */
    // Metadata channel and parameter heap only exist after initialize().
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    // Release lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
251 
252 /*===========================================================================
253  * FUNCTION   : openCamera
254  *
255  * DESCRIPTION: open camera
256  *
257  * PARAMETERS :
258  *   @hw_device  : double ptr for camera device struct
259  *
260  * RETURN     : int32_t type of status
261  *              NO_ERROR  -- success
262  *              none-zero failure code
263  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)264 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
265 {
266     int rc = 0;
267     pthread_mutex_lock(&mCameraSessionLock);
268     if (mCameraSessionActive) {
269         ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
270         pthread_mutex_unlock(&mCameraSessionLock);
271         return -EUSERS;
272     }
273 
274     if (mCameraOpened) {
275         *hw_device = NULL;
276         return PERMISSION_DENIED;
277     }
278 
279     rc = openCamera();
280     if (rc == 0) {
281         *hw_device = &mCameraDevice.common;
282         mCameraSessionActive = 1;
283     } else
284         *hw_device = NULL;
285 
286 #ifdef HAS_MULTIMEDIA_HINTS
287     if (rc == 0) {
288         if (m_pPowerModule) {
289             if (m_pPowerModule->powerHint) {
290                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
291                         (void *)"state=1");
292             }
293         }
294     }
295 #endif
296     pthread_mutex_unlock(&mCameraSessionLock);
297     return rc;
298 }
299 
300 /*===========================================================================
301  * FUNCTION   : openCamera
302  *
303  * DESCRIPTION: open camera
304  *
305  * PARAMETERS : none
306  *
307  * RETURN     : int32_t type of status
308  *              NO_ERROR  -- success
309  *              none-zero failure code
310  *==========================================================================*/
openCamera()311 int QCamera3HardwareInterface::openCamera()
312 {
313     if (mCameraHandle) {
314         ALOGE("Failure: Camera already opened");
315         return ALREADY_EXISTS;
316     }
317     mCameraHandle = camera_open(mCameraId);
318     if (!mCameraHandle) {
319         ALOGE("camera_open failed.");
320         return UNKNOWN_ERROR;
321     }
322 
323     mCameraOpened = true;
324 
325     return NO_ERROR;
326 }
327 
328 /*===========================================================================
329  * FUNCTION   : closeCamera
330  *
331  * DESCRIPTION: close camera
332  *
333  * PARAMETERS : none
334  *
335  * RETURN     : int32_t type of status
336  *              NO_ERROR  -- success
337  *              none-zero failure code
338  *==========================================================================*/
closeCamera()339 int QCamera3HardwareInterface::closeCamera()
340 {
341     int rc = NO_ERROR;
342 
343     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
344     mCameraHandle = NULL;
345     mCameraOpened = false;
346 
347 #ifdef HAS_MULTIMEDIA_HINTS
348     if (rc == NO_ERROR) {
349         if (m_pPowerModule) {
350             if (m_pPowerModule->powerHint) {
351                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
352                         (void *)"state=0");
353             }
354         }
355     }
356 #endif
357 
358     return rc;
359 }
360 
361 /*===========================================================================
362  * FUNCTION   : initialize
363  *
364  * DESCRIPTION: Initialize frameworks callback functions
365  *
366  * PARAMETERS :
367  *   @callback_ops : callback function to frameworks
368  *
369  * RETURN     :
370  *
371  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)372 int QCamera3HardwareInterface::initialize(
373         const struct camera3_callback_ops *callback_ops)
374 {
375     int rc;
376 
377     pthread_mutex_lock(&mMutex);
378 
379     rc = initParameters();
380     if (rc < 0) {
381         ALOGE("%s: initParamters failed %d", __func__, rc);
382        goto err1;
383     }
384 
385     mCallbackOps = callback_ops;
386 
387     pthread_mutex_unlock(&mMutex);
388     mCameraInitialized = true;
389     return 0;
390 
391 err1:
392     pthread_mutex_unlock(&mMutex);
393     return rc;
394 }
395 
/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams. Existing streams that reappear in the new
 *              list are reconfigured; streams that disappear are torn down.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     : 0 on success, BAD_VALUE / -ENOMEM / channel error otherwise
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    /* first invalidate all the streams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }

    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }
    // Acquire Mutex after stoping all the channels
    pthread_mutex_lock(&mMutex);
    // Walk the requested streams: mark known ones RECONFIGURE (deleting their
    // old channel), record brand-new ones, and note input/JPEG streams.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                 newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): stream_info_t is malloc'd without memset, so
            // buffer_set is uninitialized until registerStreamBuffers() runs;
            // the INVALID-cleanup delete[] below relies on it -- verify.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    // Streams still INVALID were not in the new list: delete their channel,
    // buffer array, and tracking record.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    // The metadata channel is rebuilt from scratch on every configure.
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Set gralloc usage bits according to the stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    // A bidirectional stream alongside a JPEG stream selects
                    // ZSL mode; the regular channel is sized to the JPEG
                    // stream's dimensions in that case.
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }

    mPendingBuffersMap.clear();
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    // Logged but not propagated; rc is overwritten by
                    // subsequent iterations -- long-standing behavior.
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        mPendingBuffersMap.add((*it)->stream, 0);
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}
636 
637 /*===========================================================================
638  * FUNCTION   : validateCaptureRequest
639  *
640  * DESCRIPTION: validate a capture request from camera service
641  *
642  * PARAMETERS :
643  *   @request : request from framework to process
644  *
645  * RETURN     :
646  *
647  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)648 int QCamera3HardwareInterface::validateCaptureRequest(
649                     camera3_capture_request_t *request)
650 {
651     ssize_t idx = 0;
652     const camera3_stream_buffer_t *b;
653     CameraMetadata meta;
654 
655     /* Sanity check the request */
656     if (request == NULL) {
657         ALOGE("%s: NULL capture request", __func__);
658         return BAD_VALUE;
659     }
660 
661     uint32_t frameNumber = request->frame_number;
662     if (request->input_buffer != NULL &&
663             request->input_buffer->stream != mInputStream) {
664         ALOGE("%s: Request %d: Input buffer not from input stream!",
665                 __FUNCTION__, frameNumber);
666         return BAD_VALUE;
667     }
668     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
669         ALOGE("%s: Request %d: No output buffers provided!",
670                 __FUNCTION__, frameNumber);
671         return BAD_VALUE;
672     }
673     if (request->input_buffer != NULL) {
674         b = request->input_buffer;
675         QCamera3Channel *channel =
676             static_cast<QCamera3Channel*>(b->stream->priv);
677         if (channel == NULL) {
678             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
679                     __func__, frameNumber, idx);
680             return BAD_VALUE;
681         }
682         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
683             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
684                     __func__, frameNumber, idx);
685             return BAD_VALUE;
686         }
687         if (b->release_fence != -1) {
688             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
689                     __func__, frameNumber, idx);
690             return BAD_VALUE;
691         }
692         if (b->buffer == NULL) {
693             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
694                     __func__, frameNumber, idx);
695             return BAD_VALUE;
696         }
697     }
698 
699     // Validate all buffers
700     b = request->output_buffers;
701     do {
702         QCamera3Channel *channel =
703                 static_cast<QCamera3Channel*>(b->stream->priv);
704         if (channel == NULL) {
705             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
706                     __func__, frameNumber, idx);
707             return BAD_VALUE;
708         }
709         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
710             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
711                     __func__, frameNumber, idx);
712             return BAD_VALUE;
713         }
714         if (b->release_fence != -1) {
715             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
716                     __func__, frameNumber, idx);
717             return BAD_VALUE;
718         }
719         if (b->buffer == NULL) {
720             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
721                     __func__, frameNumber, idx);
722             return BAD_VALUE;
723         }
724         idx++;
725         b = request->output_buffers + idx;
726     } while (idx < (ssize_t)request->num_output_buffers);
727 
728     return NO_ERROR;
729 }
730 
731 /*===========================================================================
732  * FUNCTION   : registerStreamBuffers
733  *
734  * DESCRIPTION: Register buffers for a given stream with the HAL device.
735  *
736  * PARAMETERS :
737  *   @stream_list : streams to be configured
738  *
739  * RETURN     :
740  *
741  *==========================================================================*/
registerStreamBuffers(const camera3_stream_buffer_set_t * buffer_set)742 int QCamera3HardwareInterface::registerStreamBuffers(
743         const camera3_stream_buffer_set_t *buffer_set)
744 {
745     int rc = 0;
746 
747     pthread_mutex_lock(&mMutex);
748 
749     if (buffer_set == NULL) {
750         ALOGE("%s: Invalid buffer_set parameter.", __func__);
751         pthread_mutex_unlock(&mMutex);
752         return -EINVAL;
753     }
754     if (buffer_set->stream == NULL) {
755         ALOGE("%s: Invalid stream parameter.", __func__);
756         pthread_mutex_unlock(&mMutex);
757         return -EINVAL;
758     }
759     if (buffer_set->num_buffers < 1) {
760         ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
761         pthread_mutex_unlock(&mMutex);
762         return -EINVAL;
763     }
764     if (buffer_set->buffers == NULL) {
765         ALOGE("%s: Invalid buffers parameter.", __func__);
766         pthread_mutex_unlock(&mMutex);
767         return -EINVAL;
768     }
769 
770     camera3_stream_t *stream = buffer_set->stream;
771     QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
772 
773     //set the buffer_set in the mStreamInfo array
774     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
775             it != mStreamInfo.end(); it++) {
776         if ((*it)->stream == stream) {
777             uint32_t numBuffers = buffer_set->num_buffers;
778             (*it)->buffer_set.stream = buffer_set->stream;
779             (*it)->buffer_set.num_buffers = numBuffers;
780             (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
781             if ((*it)->buffer_set.buffers == NULL) {
782                 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
783                 pthread_mutex_unlock(&mMutex);
784                 return -ENOMEM;
785             }
786             for (size_t j = 0; j < numBuffers; j++){
787                 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
788             }
789             (*it)->registered = 1;
790         }
791     }
792     rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
793     if (rc < 0) {
794         ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
795         pthread_mutex_unlock(&mMutex);
796         return -ENODEV;
797     }
798 
799     pthread_mutex_unlock(&mMutex);
800     return NO_ERROR;
801 }
802 
803 /*===========================================================================
804  * FUNCTION   : processCaptureRequest
805  *
806  * DESCRIPTION: process a capture request from camera service
807  *
808  * PARAMETERS :
809  *   @request : request from framework to process
810  *
 * RETURN     : NO_ERROR on success; error code if the request is invalid
812  *
813  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)814 int QCamera3HardwareInterface::processCaptureRequest(
815                     camera3_capture_request_t *request)
816 {
817     int rc = NO_ERROR;
818     int32_t request_id;
819     CameraMetadata meta;
820 
821     pthread_mutex_lock(&mMutex);
822 
823     rc = validateCaptureRequest(request);
824     if (rc != NO_ERROR) {
825         ALOGE("%s: incoming request is not valid", __func__);
826         pthread_mutex_unlock(&mMutex);
827         return rc;
828     }
829 
830     uint32_t frameNumber = request->frame_number;
831     uint32_t streamTypeMask = 0;
832 
833     meta = request->settings;
834     if (meta.exists(ANDROID_REQUEST_ID)) {
835         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
836         mCurrentRequestId = request_id;
837         ALOGV("%s: Received request with id: %d",__func__, request_id);
838     } else if (mFirstRequest || mCurrentRequestId == -1){
839         ALOGE("%s: Unable to find request id field, \
840                 & no previous id available", __func__);
841         return NAME_NOT_FOUND;
842     } else {
843         ALOGV("%s: Re-using old request id", __func__);
844         request_id = mCurrentRequestId;
845     }
846 
847     ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
848                                     __func__, __LINE__,
849                                     request->num_output_buffers,
850                                     request->input_buffer,
851                                     frameNumber);
852     // Acquire all request buffers first
853     int blob_request = 0;
854     for (size_t i = 0; i < request->num_output_buffers; i++) {
855         const camera3_stream_buffer_t& output = request->output_buffers[i];
856         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
857         sp<Fence> acquireFence = new Fence(output.acquire_fence);
858 
859         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
860         //Call function to store local copy of jpeg data for encode params.
861             blob_request = 1;
862             rc = getJpegSettings(request->settings);
863             if (rc < 0) {
864                 ALOGE("%s: failed to get jpeg parameters", __func__);
865                 pthread_mutex_unlock(&mMutex);
866                 return rc;
867             }
868         }
869 
870         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
871         if (rc != OK) {
872             ALOGE("%s: fence wait failed %d", __func__, rc);
873             pthread_mutex_unlock(&mMutex);
874             return rc;
875         }
876         streamTypeMask |= channel->getStreamTypeMask();
877     }
878 
879     PendingRequestInfo pendingRequest;
880     pendingRequest.frame_number = frameNumber;
881     pendingRequest.num_buffers = request->num_output_buffers;
882     pendingRequest.request_id = request_id;
883     pendingRequest.blob_request = blob_request;
884     pendingRequest.ae_trigger.trigger_id = mPrecaptureId;
885     pendingRequest.ae_trigger.trigger = CAM_AEC_TRIGGER_IDLE;
886 
887     rc = setFrameParameters(request->frame_number, request->settings,
888             streamTypeMask, pendingRequest.ae_trigger);
889     if (rc < 0) {
890         ALOGE("%s: fail to set frame parameters", __func__);
891         pthread_mutex_unlock(&mMutex);
892         return rc;
893     }
894 
895     for (size_t i = 0; i < request->num_output_buffers; i++) {
896         RequestedBufferInfo requestedBuf;
897         requestedBuf.stream = request->output_buffers[i].stream;
898         requestedBuf.buffer = NULL;
899         pendingRequest.buffers.push_back(requestedBuf);
900 
901         mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
902     }
903     mPendingRequestsList.push_back(pendingRequest);
904 
905     // Notify metadata channel we receive a request
906     mMetadataChannel->request(NULL, frameNumber);
907 
908     // Call request on other streams
909     for (size_t i = 0; i < request->num_output_buffers; i++) {
910         const camera3_stream_buffer_t& output = request->output_buffers[i];
911         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
912         mm_camera_buf_def_t *pInputBuffer = NULL;
913 
914         if (channel == NULL) {
915             ALOGE("%s: invalid channel pointer for stream", __func__);
916             continue;
917         }
918 
919         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
920             QCamera3RegularChannel* inputChannel = NULL;
921             if(request->input_buffer != NULL){
922 
923                 //Try to get the internal format
924                 inputChannel = (QCamera3RegularChannel*)
925                     request->input_buffer->stream->priv;
926                 if(inputChannel == NULL ){
927                     ALOGE("%s: failed to get input channel handle", __func__);
928                 } else {
929                     pInputBuffer =
930                         inputChannel->getInternalFormatBuffer(
931                                 request->input_buffer->buffer);
932                     ALOGD("%s: Input buffer dump",__func__);
933                     ALOGD("Stream id: %d", pInputBuffer->stream_id);
934                     ALOGD("streamtype:%d", pInputBuffer->stream_type);
935                     ALOGD("frame len:%d", pInputBuffer->frame_len);
936                 }
937             }
938             rc = channel->request(output.buffer, frameNumber, mJpegSettings,
939                             pInputBuffer,(QCamera3Channel*)inputChannel);
940         } else {
941             ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
942                 __LINE__, output.buffer, frameNumber);
943             rc = channel->request(output.buffer, frameNumber);
944         }
945         if (rc < 0)
946             ALOGE("%s: request failed", __func__);
947     }
948 
949     mFirstRequest = false;
950 
951     //Block on conditional variable
952     mPendingRequest = 1;
953     while (mPendingRequest == 1) {
954         pthread_cond_wait(&mRequestCond, &mMutex);
955     }
956 
957     pthread_mutex_unlock(&mMutex);
958     return rc;
959 }
960 
961 /*===========================================================================
962  * FUNCTION   : getMetadataVendorTagOps
963  *
964  * DESCRIPTION:
965  *
966  * PARAMETERS :
967  *
968  *
969  * RETURN     :
970  *==========================================================================*/
getMetadataVendorTagOps(vendor_tag_query_ops_t *)971 void QCamera3HardwareInterface::getMetadataVendorTagOps(
972                     vendor_tag_query_ops_t* /*ops*/)
973 {
974     /* Enable locks when we eventually add Vendor Tags */
975     /*
976     pthread_mutex_lock(&mMutex);
977 
978     pthread_mutex_unlock(&mMutex);
979     */
980     return;
981 }
982 
983 /*===========================================================================
984  * FUNCTION   : dump
985  *
986  * DESCRIPTION:
987  *
988  * PARAMETERS :
989  *
990  *
991  * RETURN     :
992  *==========================================================================*/
dump(int)993 void QCamera3HardwareInterface::dump(int /*fd*/)
994 {
995     /*Enable lock when we implement this function*/
996     /*
997     pthread_mutex_lock(&mMutex);
998 
999     pthread_mutex_unlock(&mMutex);
1000     */
1001     return;
1002 }
1003 
1004 
1005 /*===========================================================================
1006  * FUNCTION   : captureResultCb
1007  *
1008  * DESCRIPTION: Callback handler for all capture result
1009  *              (streams, as well as metadata)
1010  *
1011  * PARAMETERS :
1012  *   @metadata : metadata information
1013  *   @buffer   : actual gralloc buffer to be returned to frameworks.
1014  *               NULL if metadata.
1015  *
1016  * RETURN     : NONE
1017  *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        // Metadata path: translate backend metadata into framework results
        // for every pending request up to the reported frame number.
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        // NOTE(review): this local shadows the frame_number parameter; on the
        // metadata path the backend-reported frame number is used instead.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;
        bool frame_number_exists = FALSE;

        if (!frame_number_valid) {
            // SOF-only metadata: return the buffer to the metadata channel
            // and skip straight to the request-unblocking check.
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
            frame_number_exists = TRUE; // This frame number exists in Pending list
            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by substracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                // Dropped frame: synthesize minimal metadata (timestamp + id).
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id, i->ae_trigger);

                if (i->blob_request && needReprocess()) {
                   //If it is a blob request then send the metadata to the picture channel
                   // (the picture channel takes ownership of metadata_buf here)
                   mPictureChannel->queueMetadata(metadata_buf);

                } else {
                   // Return metadata buffer
                   mMetadataChannel->bufDone(metadata_buf);
                   free(metadata_buf);
                }
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            result.input_buffer = NULL;
            // Count the buffers that have already been filled for this request.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                // Move the cached buffers into the result array and release
                // the per-buffer allocations made on the buffer path below.
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                // Metadata-only result: no buffers ready for this request yet.
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }
        if (!frame_number_exists) {
            ALOGD("%s: Frame number# %d not in the Pending Request list", __func__,
                    frame_number);
            // Race condition where in Metadata Frame# is valid but its not in Pending list
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }

done_metadata:
        // Unblock processCaptureRequest only when no stream is saturated and
        // the backend reports no pending requests.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            // Metadata already went out for this frame: send a buffer-only result.
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.input_buffer = NULL;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Request still pending: cache a copy of the buffer; it is sent
            // (and the malloc freed) when the matching metadata arrives above.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
    pthread_mutex_unlock(&mMutex);
    return;
}
1211 
1212 /*===========================================================================
1213  * FUNCTION   : translateCbMetadataToResultMetadata
1214  *
1215  * DESCRIPTION:
1216  *
1217  * PARAMETERS :
1218  *   @metadata : metadata information from callback
1219  *
1220  * RETURN     : camera_metadata_t*
1221  *              metadata in a format specified by fwk
1222  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id, const cam_trigger_t &aeTrigger)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    // NOTE(review): these are C99-style VLAs sized by numFaces; if numFaces
    // is 0 they are zero-length (compiler-extension territory), but the fill
    // loop and the update() calls below are both skipped in that case.
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    // j/k are running offsets into the flattened rectangle (4 ints/face) and
    // landmark (6 ints/face) arrays.
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
            &aeTrigger.trigger_id, 1);

    /*aec regions*/
    // Region arrays are 5 ints: left, top, right, bottom, weight.
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
    if(mIsZslMode) {
        // In ZSL mode always report AE as converged.
        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
    } else {
        uint8_t ae_state =
            *(uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
        //Override AE state for front(YUV) sensor if corresponding request
        //contain a precapture trigger. This is to work around the precapture
        //trigger timeout for YUV sensor.
        if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT &&
                aeTrigger.trigger_id > 0 && aeTrigger.trigger ==
                ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START) {
            ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
        }
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
    }
    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    // Exposure time and ISO are additionally cached in mMetadataResponse for
    // later use (e.g. jpeg exif population).
    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    mMetadataResponse.exposure_time = *sensorExpTime;
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);


    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Lens shading map: 4 floats (RGGB gains) per grid cell.
    cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
        POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
    int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
    int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
    camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                       (float*)lensShadingMap->lens_shading,
                       4*map_width*map_height);

    cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);

    cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                       predColorCorrectionGains->gains, 4);

    cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
        POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
    camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                       (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);

    uint8_t *blackLevelLock = (uint8_t*)
        POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
    camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);

    uint8_t *sceneFlicker = (uint8_t*)
        POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
    camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);


    // release() transfers ownership of the packed metadata to the caller,
    // which frees it with free_camera_metadata() after delivery.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
1475 
1476 /*===========================================================================
1477  * FUNCTION   : convertToRegions
1478  *
1479  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
1480  *
1481  * PARAMETERS :
1482  *   @rect   : cam_rect_t struct to convert
1483  *   @region : int32_t destination array
1484  *   @weight : if we are converting from cam_area_t, weight is valid
1485  *             else weight = -1
1486  *
1487  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)1488 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
1489     region[0] = rect.left;
1490     region[1] = rect.top;
1491     region[2] = rect.left + rect.width;
1492     region[3] = rect.top + rect.height;
1493     if (weight > -1) {
1494         region[4] = weight;
1495     }
1496 }
1497 
/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a framework metadata region entry
 *              ([xmin, ymin, xmax, ymax, weight]) into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : destination cam_area_t to fill
 *   @settings : capture request settings containing the region entry
 *   @tag      : metadata tag identifying which region to read
 *
 *==========================================================================*/
convertFromRegions(cam_area_t * roi,const camera_metadata_t * settings,uint32_t tag)1510 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
1511                                                    const camera_metadata_t *settings,
1512                                                    uint32_t tag){
1513     CameraMetadata frame_settings;
1514     frame_settings = settings;
1515     int32_t x_min = frame_settings.find(tag).data.i32[0];
1516     int32_t y_min = frame_settings.find(tag).data.i32[1];
1517     int32_t x_max = frame_settings.find(tag).data.i32[2];
1518     int32_t y_max = frame_settings.find(tag).data.i32[3];
1519     roi->weight = frame_settings.find(tag).data.i32[4];
1520     roi->rect.left = x_min;
1521     roi->rect.top = y_min;
1522     roi->rect.width = x_max - x_min;
1523     roi->rect.height = y_max - y_min;
1524 }
1525 
1526 /*===========================================================================
1527  * FUNCTION   : resetIfNeededROI
1528  *
1529  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
1530  *              crop region
1531  *
1532  * PARAMETERS :
1533  *   @roi       : cam_area_t struct to resize
1534  *   @scalerCropRegion : cam_crop_region_t region to compare against
1535  *
1536  *
1537  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)1538 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
1539                                                  const cam_crop_region_t* scalerCropRegion)
1540 {
1541     int32_t roi_x_max = roi->rect.width + roi->rect.left;
1542     int32_t roi_y_max = roi->rect.height + roi->rect.top;
1543     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top;
1544     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left;
1545     if ((roi_x_max < scalerCropRegion->left) ||
1546         (roi_y_max < scalerCropRegion->top)  ||
1547         (roi->rect.left > crop_x_max) ||
1548         (roi->rect.top > crop_y_max)){
1549         return false;
1550     }
1551     if (roi->rect.left < scalerCropRegion->left) {
1552         roi->rect.left = scalerCropRegion->left;
1553     }
1554     if (roi->rect.top < scalerCropRegion->top) {
1555         roi->rect.top = scalerCropRegion->top;
1556     }
1557     if (roi_x_max > crop_x_max) {
1558         roi_x_max = crop_x_max;
1559     }
1560     if (roi_y_max > crop_y_max) {
1561         roi_y_max = crop_y_max;
1562     }
1563     roi->rect.width = roi_x_max - roi->rect.left;
1564     roi->rect.height = roi_y_max - roi->rect.top;
1565     return true;
1566 }
1567 
/*===========================================================================
 * FUNCTION   : convertLandmarks
 *
 * DESCRIPTION: helper method to extract the landmarks from face detection info
 *
 * PARAMETERS :
 *   @face      : cam_face_detection_info_t to read the landmarks from
 *   @landmarks : int32_t destination array (eye and mouth coordinates)
 *
 *
 *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)1579 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
1580 {
1581     landmarks[0] = face.left_eye_center.x;
1582     landmarks[1] = face.left_eye_center.y;
1583     landmarks[2] = face.right_eye_center.y;
1584     landmarks[3] = face.right_eye_center.y;
1585     landmarks[4] = face.mouth_center.x;
1586     landmarks[5] = face.mouth_center.y;
1587 }
1588 
1589 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
1590 /*===========================================================================
1591  * FUNCTION   : initCapabilities
1592  *
1593  * DESCRIPTION: initialize camera capabilities in static data struct
1594  *
1595  * PARAMETERS :
1596  *   @cameraId  : camera Id
1597  *
1598  * RETURN     : int32_t type of status
1599  *              NO_ERROR  -- success
 *              non-zero failure code
1601  *==========================================================================*/
initCapabilities(int cameraId)1602 int QCamera3HardwareInterface::initCapabilities(int cameraId)
1603 {
1604     int rc = 0;
1605     mm_camera_vtbl_t *cameraHandle = NULL;
1606     QCamera3HeapMemory *capabilityHeap = NULL;
1607 
1608     cameraHandle = camera_open(cameraId);
1609     if (!cameraHandle) {
1610         ALOGE("%s: camera_open failed", __func__);
1611         rc = -1;
1612         goto open_failed;
1613     }
1614 
1615     capabilityHeap = new QCamera3HeapMemory();
1616     if (capabilityHeap == NULL) {
1617         ALOGE("%s: creation of capabilityHeap failed", __func__);
1618         goto heap_creation_failed;
1619     }
1620     /* Allocate memory for capability buffer */
1621     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
1622     if(rc != OK) {
1623         ALOGE("%s: No memory for cappability", __func__);
1624         goto allocate_failed;
1625     }
1626 
1627     /* Map memory for capability buffer */
1628     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
1629     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
1630                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
1631                                 capabilityHeap->getFd(0),
1632                                 sizeof(cam_capability_t));
1633     if(rc < 0) {
1634         ALOGE("%s: failed to map capability buffer", __func__);
1635         goto map_failed;
1636     }
1637 
1638     /* Query Capability */
1639     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
1640     if(rc < 0) {
1641         ALOGE("%s: failed to query capability",__func__);
1642         goto query_failed;
1643     }
1644     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
1645     if (!gCamCapability[cameraId]) {
1646         ALOGE("%s: out of memory", __func__);
1647         goto query_failed;
1648     }
1649     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
1650                                         sizeof(cam_capability_t));
1651     rc = 0;
1652 
1653 query_failed:
1654     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
1655                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
1656 map_failed:
1657     capabilityHeap->deallocate();
1658 allocate_failed:
1659     delete capabilityHeap;
1660 heap_creation_failed:
1661     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
1662     cameraHandle = NULL;
1663 open_failed:
1664     return rc;
1665 }
1666 
1667 /*===========================================================================
1668  * FUNCTION   : initParameters
1669  *
1670  * DESCRIPTION: initialize camera parameters
1671  *
1672  * PARAMETERS :
1673  *
1674  * RETURN     : int32_t type of status
1675  *              NO_ERROR  -- success
 *              non-zero failure code
1677  *==========================================================================*/
initParameters()1678 int QCamera3HardwareInterface::initParameters()
1679 {
1680     int rc = 0;
1681 
1682     //Allocate Set Param Buffer
1683     mParamHeap = new QCamera3HeapMemory();
1684     rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
1685     if(rc != OK) {
1686         rc = NO_MEMORY;
1687         ALOGE("Failed to allocate SETPARM Heap memory");
1688         delete mParamHeap;
1689         mParamHeap = NULL;
1690         return rc;
1691     }
1692 
1693     //Map memory for parameters buffer
1694     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1695             CAM_MAPPING_BUF_TYPE_PARM_BUF,
1696             mParamHeap->getFd(0),
1697             sizeof(parm_buffer_t));
1698     if(rc < 0) {
1699         ALOGE("%s:failed to map SETPARM buffer",__func__);
1700         rc = FAILED_TRANSACTION;
1701         mParamHeap->deallocate();
1702         delete mParamHeap;
1703         mParamHeap = NULL;
1704         return rc;
1705     }
1706 
1707     mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
1708     return rc;
1709 }
1710 
1711 /*===========================================================================
1712  * FUNCTION   : deinitParameters
1713  *
1714  * DESCRIPTION: de-initialize camera parameters
1715  *
1716  * PARAMETERS :
1717  *
1718  * RETURN     : NONE
1719  *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap from the daemon first: it holds a mapping for this buffer keyed
    // by CAM_MAPPING_BUF_TYPE_PARM_BUF, and the backing heap is freed next.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's memory (see initParameters),
    // so it is stale once the heap is gone.
    mParameters = NULL;
}
1731 
1732 /*===========================================================================
1733  * FUNCTION   : calcMaxJpegSize
1734  *
1735  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
1736  *
1737  * PARAMETERS :
1738  *
1739  * RETURN     : max_jpeg_size
1740  *==========================================================================*/
calcMaxJpegSize()1741 int QCamera3HardwareInterface::calcMaxJpegSize()
1742 {
1743     int32_t max_jpeg_size = 0;
1744     int temp_width, temp_height;
1745     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1746         temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
1747         temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
1748         if (temp_width * temp_height > max_jpeg_size ) {
1749             max_jpeg_size = temp_width * temp_height;
1750         }
1751     }
1752     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
1753     return max_jpeg_size;
1754 }
1755 
1756 /*===========================================================================
1757  * FUNCTION   : initStaticMetadata
1758  *
1759  * DESCRIPTION: initialize the static metadata
1760  *
1761  * PARAMETERS :
1762  *   @cameraId  : camera Id
1763  *
1764  * RETURN     : int32_t type of status
1765  *              0  -- success
1766  *              non-zero failure code
1767  *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      &gCamCapability[cameraId]->focal_length,
                      1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                      gCamCapability[cameraId]->optical_stab_modes_count);

    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                                    gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
            &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                               gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, 2);

    /* Active array is reported with a (0,0) origin; only width/height come
     * from the capability struct. */
    int32_t active_array_size[] = {0, 0,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
    /*hardcode 0 for now*/
    int32_t max_face_count = 0;
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                      &max_face_count, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                                gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                      &gCamCapability[cameraId]->raw_min_duration,
                       1);

    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    /* Flatten the supported picture sizes into [w0,h0,w1,h1,...]. */
    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    /* Flatten the fps ranges into [min0,max0,min1,max1,...]. */
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                                        gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    /* NOTE(review): this duplicates calcMaxJpegSize(); kept inline here,
     * presumably because that helper reads the mCameraId member while this
     * method receives cameraId as a parameter -- confirm before deduping. */
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    /* Translate backend effect enums to framework values, skipping any the
     * framework mapping table does not know about. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* supported_indexes remembers which backend table slot each advertised
     * scene mode came from; makeOverridesList uses it below to pick the
     * matching override entries. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    /* Three override values (AE, AWB, AF) per advertised scene mode. */
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                 gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        /* NOTE(review): lookupFwkName returns int elsewhere in this function;
         * storing it in int8_t here relies on framework AWB enum values
         * fitting in a signed byte -- confirm. */
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                    gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    /* NOTE(review): avail_ae_modes holds at most 5 entries; if
     * supported_ae_modes_cnt can exceed 2 while flash is available, the
     * three flash AE modes appended below would overflow the array --
     * confirm the backend's upper bound. */
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* Presumably (raw, processed, stalling) stream counts per the camera3
     * MAX_NUM_OUTPUT_STREAMS convention -- confirm against the HAL3 spec. */
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    /* Ownership of the metadata buffer moves to the global table; it is
     * not freed by staticInfo's destructor after release(). */
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}
2116 
2117 /*===========================================================================
2118  * FUNCTION   : makeTable
2119  *
2120  * DESCRIPTION: make a table of sizes
2121  *
2122  * PARAMETERS :
2123  *
2124  *
2125  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,uint8_t size,int32_t * sizeTable)2126 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
2127                                           int32_t* sizeTable)
2128 {
2129     int j = 0;
2130     for (int i = 0; i < size; i++) {
2131         sizeTable[j] = dimTable[i].width;
2132         sizeTable[j+1] = dimTable[i].height;
2133         j+=2;
2134     }
2135 }
2136 
2137 /*===========================================================================
2138  * FUNCTION   : makeFPSTable
2139  *
2140  * DESCRIPTION: make a table of fps ranges
2141  *
2142  * PARAMETERS :
2143  *
2144  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,uint8_t size,int32_t * fpsRangesTable)2145 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
2146                                           int32_t* fpsRangesTable)
2147 {
2148     int j = 0;
2149     for (int i = 0; i < size; i++) {
2150         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
2151         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
2152         j+=2;
2153     }
2154 }
2155 
2156 /*===========================================================================
2157  * FUNCTION   : makeOverridesList
2158  *
2159  * DESCRIPTION: make a list of scene mode overrides
2160  *
2161  * PARAMETERS :
2162  *
2163  *
2164  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,uint8_t size,uint8_t * overridesList,uint8_t * supported_indexes,int camera_id)2165 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
2166                                                   uint8_t size, uint8_t* overridesList,
2167                                                   uint8_t* supported_indexes,
2168                                                   int camera_id)
2169 {
2170     /*daemon will give a list of overrides for all scene modes.
2171       However we should send the fwk only the overrides for the scene modes
2172       supported by the framework*/
2173     int j = 0, index = 0, supt = 0;
2174     uint8_t focus_override;
2175     for (int i = 0; i < size; i++) {
2176         supt = 0;
2177         index = supported_indexes[i];
2178         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
2179         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
2180                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
2181                                                     overridesTable[index].awb_mode);
2182         focus_override = (uint8_t)overridesTable[index].af_mode;
2183         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
2184            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
2185               supt = 1;
2186               break;
2187            }
2188         }
2189         if (supt) {
2190            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
2191                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
2192                                               focus_override);
2193         } else {
2194            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
2195         }
2196         j+=3;
2197     }
2198 }
2199 
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the format from backend to a type recognized by
 *              the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
getScalarFormat(int32_t format)2210 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
2211 {
2212     int32_t halPixelFormat;
2213 
2214     switch (format) {
2215     case CAM_FORMAT_YUV_420_NV12:
2216         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
2217         break;
2218     case CAM_FORMAT_YUV_420_NV21:
2219         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2220         break;
2221     case CAM_FORMAT_YUV_420_NV21_ADRENO:
2222         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
2223         break;
2224     case CAM_FORMAT_YUV_420_YV12:
2225         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
2226         break;
2227     case CAM_FORMAT_YUV_422_NV16:
2228     case CAM_FORMAT_YUV_422_NV61:
2229     default:
2230         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
2231         break;
2232     }
2233     return halPixelFormat;
2234 }
2235 
/*===========================================================================
 * FUNCTION   : getSensorSensitivity
 *
 * DESCRIPTION: convert iso_mode to an integer value
 *
 * PARAMETERS : iso_mode : the iso_mode supported by sensor
 *
 * RETURN     : sensitivity supported by sensor, or -1 if unrecognized
 *
 *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)2246 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
2247 {
2248     int32_t sensitivity;
2249 
2250     switch (iso_mode) {
2251     case CAM_ISO_MODE_100:
2252         sensitivity = 100;
2253         break;
2254     case CAM_ISO_MODE_200:
2255         sensitivity = 200;
2256         break;
2257     case CAM_ISO_MODE_400:
2258         sensitivity = 400;
2259         break;
2260     case CAM_ISO_MODE_800:
2261         sensitivity = 800;
2262         break;
2263     case CAM_ISO_MODE_1600:
2264         sensitivity = 1600;
2265         break;
2266     default:
2267         sensitivity = -1;
2268         break;
2269     }
2270     return sensitivity;
2271 }
2272 
2273 
2274 /*===========================================================================
2275  * FUNCTION   : AddSetParmEntryToBatch
2276  *
2277  * DESCRIPTION: add set parameter entry into batch
2278  *
2279  * PARAMETERS :
2280  *   @p_table     : ptr to parameter buffer
2281  *   @paramType   : parameter type
2282  *   @paramLength : length of parameter value
2283  *   @paramValue  : ptr to parameter value
2284  *
2285  * RETURN     : int32_t type of status
2286  *              NO_ERROR  -- success
2287  *              none-zero failure code
2288  *==========================================================================*/
AddSetParmEntryToBatch(parm_buffer_t * p_table,cam_intf_parm_type_t paramType,uint32_t paramLength,void * paramValue)2289 int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
2290                                                           cam_intf_parm_type_t paramType,
2291                                                           uint32_t paramLength,
2292                                                           void *paramValue)
2293 {
2294     int position = paramType;
2295     int current, next;
2296 
2297     /*************************************************************************
2298     *                 Code to take care of linking next flags                *
2299     *************************************************************************/
2300     current = GET_FIRST_PARAM_ID(p_table);
2301     if (position == current){
2302         //DO NOTHING
2303     } else if (position < current){
2304         SET_NEXT_PARAM_ID(position, p_table, current);
2305         SET_FIRST_PARAM_ID(p_table, position);
2306     } else {
2307         /* Search for the position in the linked list where we need to slot in*/
2308         while (position > GET_NEXT_PARAM_ID(current, p_table))
2309             current = GET_NEXT_PARAM_ID(current, p_table);
2310 
2311         /*If node already exists no need to alter linking*/
2312         if (position != GET_NEXT_PARAM_ID(current, p_table)) {
2313             next = GET_NEXT_PARAM_ID(current, p_table);
2314             SET_NEXT_PARAM_ID(current, p_table, position);
2315             SET_NEXT_PARAM_ID(position, p_table, next);
2316         }
2317     }
2318 
2319     /*************************************************************************
2320     *                   Copy contents into entry                             *
2321     *************************************************************************/
2322 
2323     if (paramLength > sizeof(parm_type_t)) {
2324         ALOGE("%s:Size of input larger than max entry size",__func__);
2325         return BAD_VALUE;
2326     }
2327     memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
2328     return NO_ERROR;
2329 }
2330 
2331 /*===========================================================================
2332  * FUNCTION   : lookupFwkName
2333  *
2334  * DESCRIPTION: In case the enum is not same in fwk and backend
2335  *              make sure the parameter is correctly propogated
2336  *
2337  * PARAMETERS  :
2338  *   @arr      : map between the two enums
2339  *   @len      : len of the map
2340  *   @hal_name : name of the hal_parm to map
2341  *
2342  * RETURN     : int type of status
2343  *              fwk_name  -- success
2344  *              none-zero failure code
2345  *==========================================================================*/
lookupFwkName(const QCameraMap arr[],int len,int hal_name)2346 int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
2347                                              int len, int hal_name)
2348 {
2349 
2350     for (int i = 0; i < len; i++) {
2351         if (arr[i].hal_name == hal_name)
2352             return arr[i].fwk_name;
2353     }
2354 
2355     /* Not able to find matching framework type is not necessarily
2356      * an error case. This happens when mm-camera supports more attributes
2357      * than the frameworks do */
2358     ALOGD("%s: Cannot find matching framework type", __func__);
2359     return NAME_NOT_FOUND;
2360 }
2361 
2362 /*===========================================================================
2363  * FUNCTION   : lookupHalName
2364  *
2365  * DESCRIPTION: In case the enum is not same in fwk and backend
2366  *              make sure the parameter is correctly propogated
2367  *
2368  * PARAMETERS  :
2369  *   @arr      : map between the two enums
2370  *   @len      : len of the map
2371  *   @fwk_name : name of the hal_parm to map
2372  *
2373  * RETURN     : int32_t type of status
2374  *              hal_name  -- success
2375  *              none-zero failure code
2376  *==========================================================================*/
lookupHalName(const QCameraMap arr[],int len,int fwk_name)2377 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
2378                                              int len, int fwk_name)
2379 {
2380     for (int i = 0; i < len; i++) {
2381        if (arr[i].fwk_name == fwk_name)
2382            return arr[i].hal_name;
2383     }
2384     ALOGE("%s: Cannot find matching hal type", __func__);
2385     return NAME_NOT_FOUND;
2386 }
2387 
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
getCamInfo(int cameraId,struct camera_info * info)2401 int QCamera3HardwareInterface::getCamInfo(int cameraId,
2402                                     struct camera_info *info)
2403 {
2404     int rc = 0;
2405 
2406     if (NULL == gCamCapability[cameraId]) {
2407         rc = initCapabilities(cameraId);
2408         if (rc < 0) {
2409             //pthread_mutex_unlock(&g_camlock);
2410             return rc;
2411         }
2412     }
2413 
2414     if (NULL == gStaticMetadata[cameraId]) {
2415         rc = initStaticMetadata(cameraId);
2416         if (rc < 0) {
2417             return rc;
2418         }
2419     }
2420 
2421     switch(gCamCapability[cameraId]->position) {
2422     case CAM_POSITION_BACK:
2423         info->facing = CAMERA_FACING_BACK;
2424         break;
2425 
2426     case CAM_POSITION_FRONT:
2427         info->facing = CAMERA_FACING_FRONT;
2428         break;
2429 
2430     default:
2431         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
2432         rc = -1;
2433         break;
2434     }
2435 
2436 
2437     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
2438     info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
2439     info->static_camera_characteristics = gStaticMetadata[cameraId];
2440 
2441     return rc;
2442 }
2443 
/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
 *
 * DESCRIPTION: translate the capability into a default camera_metadata_t
 *              request template
 *
 * PARAMETERS :
 *   @type : type of the request template
 *
 * RETURN     : success: camera_metadata_t*
 *              failure: NULL
 *
 *==========================================================================*/
translateCapabilityToMetadata(int type)2456 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
2457 {
2458     pthread_mutex_lock(&mMutex);
2459 
2460     if (mDefaultMetadata[type] != NULL) {
2461         pthread_mutex_unlock(&mMutex);
2462         return mDefaultMetadata[type];
2463     }
2464     //first time we are handling this request
2465     //fill up the metadata structure using the wrapper class
2466     CameraMetadata settings;
2467     //translate from cam_capability_t to camera_metadata_tag_t
2468     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2469     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
2470     int32_t defaultRequestID = 0;
2471     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
2472 
2473     /*control*/
2474 
2475     uint8_t controlIntent = 0;
2476     switch (type) {
2477       case CAMERA3_TEMPLATE_PREVIEW:
2478         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
2479         break;
2480       case CAMERA3_TEMPLATE_STILL_CAPTURE:
2481         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
2482         break;
2483       case CAMERA3_TEMPLATE_VIDEO_RECORD:
2484         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
2485         break;
2486       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
2487         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
2488         break;
2489       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
2490         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
2491         break;
2492       default:
2493         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
2494         break;
2495     }
2496     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2497 
2498     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2499             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
2500 
2501     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2502     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2503 
2504     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2505     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2506 
2507     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
2508     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2509 
2510     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
2511     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
2512 
2513     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
2514     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2515 
2516     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
2517     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2518 
2519     static uint8_t focusMode;
2520     if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
2521         ALOGE("%s: Setting focus mode to auto", __func__);
2522         focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
2523     } else {
2524         ALOGE("%s: Setting focus mode to off", __func__);
2525         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
2526     }
2527     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
2528 
2529     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
2530     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2531 
2532     /*flash*/
2533     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
2534     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
2535 
2536     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
2537     settings.update(ANDROID_FLASH_FIRING_POWER,
2538             &flashFiringLevel, 1);
2539 
2540     /* lens */
2541     float default_aperture = gCamCapability[mCameraId]->apertures[0];
2542     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
2543 
2544     if (gCamCapability[mCameraId]->filter_densities_count) {
2545         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
2546         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
2547                         gCamCapability[mCameraId]->filter_densities_count);
2548     }
2549 
2550     float default_focal_length = gCamCapability[mCameraId]->focal_length;
2551     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
2552 
2553     /* Exposure time(Update the Min Exposure Time)*/
2554     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
2555     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
2556 
2557     /* sensitivity */
2558     static const int32_t default_sensitivity = 100;
2559     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
2560 
2561     mDefaultMetadata[type] = settings.release();
2562 
2563     pthread_mutex_unlock(&mMutex);
2564     return mDefaultMetadata[type];
2565 }
2566 
2567 /*===========================================================================
2568  * FUNCTION   : setFrameParameters
2569  *
2570  * DESCRIPTION: set parameters per frame as requested in the metadata from
2571  *              framework
2572  *
2573  * PARAMETERS :
2574  *   @frame_id  : frame number for this particular request
2575  *   @settings  : frame settings information from framework
2576  *   @streamTypeMask : bit mask of stream types on which buffers are requested
2577  *   @aeTrigger : Return aeTrigger if it exists in the request
2578  *
2579  * RETURN     : success: NO_ERROR
2580  *              failure:
2581  *==========================================================================*/
setFrameParameters(int frame_id,const camera_metadata_t * settings,uint32_t streamTypeMask,cam_trigger_t & aeTrigger)2582 int QCamera3HardwareInterface::setFrameParameters(int frame_id,
2583         const camera_metadata_t *settings, uint32_t streamTypeMask,
2584         cam_trigger_t &aeTrigger)
2585 {
2586     /*translate from camera_metadata_t type to parm_type_t*/
2587     int rc = 0;
2588     if (settings == NULL && mFirstRequest) {
2589         /*settings cannot be null for the first request*/
2590         return BAD_VALUE;
2591     }
2592 
2593     int32_t hal_version = CAM_HAL_V3;
2594 
2595     memset(mParameters, 0, sizeof(parm_buffer_t));
2596     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
2597     AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2598                 sizeof(hal_version), &hal_version);
2599 
2600     /*we need to update the frame number in the parameters*/
2601     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
2602                                 sizeof(frame_id), &frame_id);
2603     if (rc < 0) {
2604         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2605         return BAD_VALUE;
2606     }
2607 
2608     /* Update stream id mask where buffers are requested */
2609     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
2610                                 sizeof(streamTypeMask), &streamTypeMask);
2611     if (rc < 0) {
2612         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
2613         return BAD_VALUE;
2614     }
2615 
2616     if(settings != NULL){
2617         rc = translateMetadataToParameters(settings, aeTrigger);
2618     }
2619     /*set the parameters to backend*/
2620     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2621     return rc;
2622 }
2623 
2624 /*===========================================================================
2625  * FUNCTION   : translateMetadataToParameters
2626  *
2627  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
2628  *
2629  *
2630  * PARAMETERS :
2631  *   @settings  : frame settings information from framework
2632  *   @aeTrigger : output ae trigger if it's set in request
2633  *
2634  * RETURN     : success: NO_ERROR
2635  *              failure:
2636  *==========================================================================*/
translateMetadataToParameters(const camera_metadata_t * settings,cam_trigger_t & aeTrigger)2637 int QCamera3HardwareInterface::translateMetadataToParameters(
2638         const camera_metadata_t *settings, cam_trigger_t &aeTrigger)
2639 {
2640     int rc = 0;
2641     CameraMetadata frame_settings;
2642     frame_settings = settings;
2643 
2644 
2645     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
2646         int32_t antibandingMode =
2647             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
2648         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
2649                 sizeof(antibandingMode), &antibandingMode);
2650     }
2651 
2652     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
2653         int32_t expCompensation = frame_settings.find(
2654             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
2655         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
2656             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
2657         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
2658             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
2659         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
2660           sizeof(expCompensation), &expCompensation);
2661     }
2662 
2663     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
2664         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
2665         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
2666                 sizeof(aeLock), &aeLock);
2667     }
2668     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2669         cam_fps_range_t fps_range;
2670         fps_range.min_fps =
2671             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
2672         fps_range.max_fps =
2673             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2674         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
2675                 sizeof(fps_range), &fps_range);
2676     }
2677 
2678     float focalDistance = -1.0;
2679     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
2680         focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
2681         rc = AddSetParmEntryToBatch(mParameters,
2682                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
2683                 sizeof(focalDistance), &focalDistance);
2684     }
2685 
2686     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
2687         uint8_t fwk_focusMode =
2688             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
2689         uint8_t focusMode;
2690         if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
2691             focusMode = CAM_FOCUS_MODE_INFINITY;
2692         } else{
2693          focusMode = lookupHalName(FOCUS_MODES_MAP,
2694                                    sizeof(FOCUS_MODES_MAP),
2695                                    fwk_focusMode);
2696         }
2697         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
2698                 sizeof(focusMode), &focusMode);
2699     }
2700 
2701     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
2702         uint8_t awbLock =
2703             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
2704         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
2705                 sizeof(awbLock), &awbLock);
2706     }
2707 
2708     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
2709         uint8_t fwk_whiteLevel =
2710             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
2711         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
2712                 sizeof(WHITE_BALANCE_MODES_MAP),
2713                 fwk_whiteLevel);
2714         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
2715                 sizeof(whiteLevel), &whiteLevel);
2716     }
2717 
2718     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
2719         uint8_t fwk_effectMode =
2720             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
2721         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
2722                 sizeof(EFFECT_MODES_MAP),
2723                 fwk_effectMode);
2724         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
2725                 sizeof(effectMode), &effectMode);
2726     }
2727 
2728     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2729         uint8_t fwk_aeMode =
2730             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2731         uint8_t aeMode;
2732         int32_t redeye;
2733 
2734         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
2735             aeMode = CAM_AE_MODE_OFF;
2736         } else {
2737             aeMode = CAM_AE_MODE_ON;
2738         }
2739         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
2740             redeye = 1;
2741         } else {
2742             redeye = 0;
2743         }
2744 
2745         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
2746                                           sizeof(AE_FLASH_MODE_MAP),
2747                                           fwk_aeMode);
2748         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
2749                 sizeof(aeMode), &aeMode);
2750         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2751                 sizeof(flashMode), &flashMode);
2752         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
2753                 sizeof(redeye), &redeye);
2754     }
2755 
2756     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
2757         uint8_t colorCorrectMode =
2758             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
2759         rc =
2760             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
2761                     sizeof(colorCorrectMode), &colorCorrectMode);
2762     }
2763 
2764     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
2765         cam_color_correct_gains_t colorCorrectGains;
2766         for (int i = 0; i < 4; i++) {
2767             colorCorrectGains.gains[i] =
2768                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
2769         }
2770         rc =
2771             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
2772                     sizeof(colorCorrectGains), &colorCorrectGains);
2773     }
2774 
2775     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
2776         cam_color_correct_matrix_t colorCorrectTransform;
2777         cam_rational_type_t transform_elem;
2778         int num = 0;
2779         for (int i = 0; i < 3; i++) {
2780            for (int j = 0; j < 3; j++) {
2781               transform_elem.numerator =
2782                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
2783               transform_elem.denominator =
2784                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
2785               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
2786               num++;
2787            }
2788         }
2789         rc =
2790             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
2791                     sizeof(colorCorrectTransform), &colorCorrectTransform);
2792     }
2793 
2794     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
2795         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
2796         aeTrigger.trigger =
2797             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
2798         aeTrigger.trigger_id =
2799             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
2800         mPrecaptureId = aeTrigger.trigger_id;
2801     }
2802     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
2803                                 sizeof(aeTrigger), &aeTrigger);
2804 
2805     /*af_trigger must come with a trigger id*/
2806     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
2807         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
2808         cam_trigger_t af_trigger;
2809         af_trigger.trigger =
2810             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
2811         af_trigger.trigger_id =
2812             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
2813         rc = AddSetParmEntryToBatch(mParameters,
2814                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
2815     }
2816 
2817     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
2818         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
2819         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
2820                 sizeof(metaMode), &metaMode);
2821         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2822            uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
2823            uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
2824                                              sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
2825                                              fwk_sceneMode);
2826            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2827                 sizeof(sceneMode), &sceneMode);
2828         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
2829            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2830            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2831                 sizeof(sceneMode), &sceneMode);
2832         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
2833            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
2834            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
2835                 sizeof(sceneMode), &sceneMode);
2836         }
2837     }
2838 
2839     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
2840         int32_t demosaic =
2841             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
2842         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
2843                 sizeof(demosaic), &demosaic);
2844     }
2845 
2846     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
2847         uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
2848         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
2849                 sizeof(edgeMode), &edgeMode);
2850     }
2851 
2852     if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
2853         int32_t edgeStrength =
2854             frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
2855         rc = AddSetParmEntryToBatch(mParameters,
2856                 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
2857     }
2858 
2859     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
2860         int32_t respectFlashMode = 1;
2861         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
2862             uint8_t fwk_aeMode =
2863                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
2864             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
2865                 respectFlashMode = 0;
2866                 ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
2867                     __func__);
2868             }
2869         }
2870         if (respectFlashMode) {
2871             uint8_t flashMode =
2872                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
2873             flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
2874                                           sizeof(FLASH_MODES_MAP),
2875                                           flashMode);
2876             ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
2877             // To check: CAM_INTF_META_FLASH_MODE usage
2878             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
2879                           sizeof(flashMode), &flashMode);
2880         }
2881     }
2882 
2883     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
2884         uint8_t flashPower =
2885             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
2886         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
2887                 sizeof(flashPower), &flashPower);
2888     }
2889 
2890     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
2891         int64_t flashFiringTime =
2892             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
2893         rc = AddSetParmEntryToBatch(mParameters,
2894                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
2895     }
2896 
2897     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
2898         uint8_t hotPixelMode =
2899             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
2900         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
2901                 sizeof(hotPixelMode), &hotPixelMode);
2902     }
2903 
2904     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
2905         float lensAperture =
2906             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
2907         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
2908                 sizeof(lensAperture), &lensAperture);
2909     }
2910 
2911     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
2912         float filterDensity =
2913             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
2914         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
2915                 sizeof(filterDensity), &filterDensity);
2916     }
2917 
2918     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
2919         float focalLength =
2920             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
2921         rc = AddSetParmEntryToBatch(mParameters,
2922                 CAM_INTF_META_LENS_FOCAL_LENGTH,
2923                 sizeof(focalLength), &focalLength);
2924     }
2925 
2926     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
2927         uint8_t optStabMode =
2928             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
2929         rc = AddSetParmEntryToBatch(mParameters,
2930                 CAM_INTF_META_LENS_OPT_STAB_MODE,
2931                 sizeof(optStabMode), &optStabMode);
2932     }
2933 
2934     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
2935         uint8_t noiseRedMode =
2936             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
2937         rc = AddSetParmEntryToBatch(mParameters,
2938                 CAM_INTF_META_NOISE_REDUCTION_MODE,
2939                 sizeof(noiseRedMode), &noiseRedMode);
2940     }
2941 
2942     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
2943         uint8_t noiseRedStrength =
2944             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
2945         rc = AddSetParmEntryToBatch(mParameters,
2946                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
2947                 sizeof(noiseRedStrength), &noiseRedStrength);
2948     }
2949 
2950     cam_crop_region_t scalerCropRegion;
2951     bool scalerCropSet = false;
2952     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
2953         scalerCropRegion.left =
2954             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
2955         scalerCropRegion.top =
2956             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
2957         scalerCropRegion.width =
2958             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
2959         scalerCropRegion.height =
2960             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
2961         rc = AddSetParmEntryToBatch(mParameters,
2962                 CAM_INTF_META_SCALER_CROP_REGION,
2963                 sizeof(scalerCropRegion), &scalerCropRegion);
2964         scalerCropSet = true;
2965     }
2966 
2967     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2968         int64_t sensorExpTime =
2969             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
2970         rc = AddSetParmEntryToBatch(mParameters,
2971                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
2972                 sizeof(sensorExpTime), &sensorExpTime);
2973     }
2974 
2975     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
2976         int64_t sensorFrameDuration =
2977             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
2978         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
2979             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
2980         rc = AddSetParmEntryToBatch(mParameters,
2981                 CAM_INTF_META_SENSOR_FRAME_DURATION,
2982                 sizeof(sensorFrameDuration), &sensorFrameDuration);
2983     }
2984 
2985     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
2986         int32_t sensorSensitivity =
2987             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
2988         if (sensorSensitivity <
2989                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
2990             sensorSensitivity =
2991                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
2992         if (sensorSensitivity >
2993                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
2994             sensorSensitivity =
2995                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
2996         rc = AddSetParmEntryToBatch(mParameters,
2997                 CAM_INTF_META_SENSOR_SENSITIVITY,
2998                 sizeof(sensorSensitivity), &sensorSensitivity);
2999     }
3000 
3001     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
3002         int32_t shadingMode =
3003             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
3004         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
3005                 sizeof(shadingMode), &shadingMode);
3006     }
3007 
3008     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
3009         uint8_t shadingStrength =
3010             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
3011         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
3012                 sizeof(shadingStrength), &shadingStrength);
3013     }
3014 
3015     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
3016         uint8_t facedetectMode =
3017             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
3018         rc = AddSetParmEntryToBatch(mParameters,
3019                 CAM_INTF_META_STATS_FACEDETECT_MODE,
3020                 sizeof(facedetectMode), &facedetectMode);
3021     }
3022 
3023     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
3024         uint8_t histogramMode =
3025             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
3026         rc = AddSetParmEntryToBatch(mParameters,
3027                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
3028                 sizeof(histogramMode), &histogramMode);
3029     }
3030 
3031     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
3032         uint8_t sharpnessMapMode =
3033             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
3034         rc = AddSetParmEntryToBatch(mParameters,
3035                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
3036                 sizeof(sharpnessMapMode), &sharpnessMapMode);
3037     }
3038 
3039     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
3040         uint8_t tonemapMode =
3041             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
3042         rc = AddSetParmEntryToBatch(mParameters,
3043                 CAM_INTF_META_TONEMAP_MODE,
3044                 sizeof(tonemapMode), &tonemapMode);
3045     }
3046     int point = 0;
3047     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
3048         cam_tonemap_curve_t tonemapCurveBlue;
3049         tonemapCurveBlue.tonemap_points_cnt =
3050            gCamCapability[mCameraId]->max_tone_map_curve_points;
3051         for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
3052             for (int j = 0; j < 2; j++) {
3053                tonemapCurveBlue.tonemap_points[i][j] =
3054                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
3055                point++;
3056             }
3057         }
3058         rc = AddSetParmEntryToBatch(mParameters,
3059                 CAM_INTF_META_TONEMAP_CURVE_BLUE,
3060                 sizeof(tonemapCurveBlue), &tonemapCurveBlue);
3061     }
3062     point = 0;
3063     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
3064         cam_tonemap_curve_t tonemapCurveGreen;
3065         tonemapCurveGreen.tonemap_points_cnt =
3066            gCamCapability[mCameraId]->max_tone_map_curve_points;
3067         for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
3068             for (int j = 0; j < 2; j++) {
3069                tonemapCurveGreen.tonemap_points[i][j] =
3070                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
3071                point++;
3072             }
3073         }
3074         rc = AddSetParmEntryToBatch(mParameters,
3075                 CAM_INTF_META_TONEMAP_CURVE_GREEN,
3076                 sizeof(tonemapCurveGreen), &tonemapCurveGreen);
3077     }
3078     point = 0;
3079     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
3080         cam_tonemap_curve_t tonemapCurveRed;
3081         tonemapCurveRed.tonemap_points_cnt =
3082            gCamCapability[mCameraId]->max_tone_map_curve_points;
3083         for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
3084             for (int j = 0; j < 2; j++) {
3085                tonemapCurveRed.tonemap_points[i][j] =
3086                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
3087                point++;
3088             }
3089         }
3090         rc = AddSetParmEntryToBatch(mParameters,
3091                 CAM_INTF_META_TONEMAP_CURVE_RED,
3092                 sizeof(tonemapCurveRed), &tonemapCurveRed);
3093     }
3094 
3095     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3096         uint8_t captureIntent =
3097             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3098         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
3099                 sizeof(captureIntent), &captureIntent);
3100     }
3101 
3102     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
3103         uint8_t blackLevelLock =
3104             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
3105         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
3106                 sizeof(blackLevelLock), &blackLevelLock);
3107     }
3108 
3109     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
3110         uint8_t lensShadingMapMode =
3111             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
3112         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
3113                 sizeof(lensShadingMapMode), &lensShadingMapMode);
3114     }
3115 
3116     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
3117         cam_area_t roi;
3118         bool reset = true;
3119         convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
3120         if (scalerCropSet) {
3121             reset = resetIfNeededROI(&roi, &scalerCropRegion);
3122         }
3123         if (reset) {
3124             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
3125                     sizeof(roi), &roi);
3126         }
3127     }
3128 
3129     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
3130         cam_area_t roi;
3131         bool reset = true;
3132         convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
3133         if (scalerCropSet) {
3134             reset = resetIfNeededROI(&roi, &scalerCropRegion);
3135         }
3136         if (reset) {
3137             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
3138                     sizeof(roi), &roi);
3139         }
3140     }
3141 
3142     if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
3143         cam_area_t roi;
3144         bool reset = true;
3145         convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
3146         if (scalerCropSet) {
3147             reset = resetIfNeededROI(&roi, &scalerCropRegion);
3148         }
3149         if (reset) {
3150             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
3151                     sizeof(roi), &roi);
3152         }
3153     }
3154     return rc;
3155 }
3156 
3157 /*===========================================================================
3158  * FUNCTION   : getJpegSettings
3159  *
3160  * DESCRIPTION: save the jpeg settings in the HAL
3161  *
3162  *
3163  * PARAMETERS :
3164  *   @settings  : frame settings information from framework
3165  *
3166  *
3167  * RETURN     : success: NO_ERROR
3168  *              failure: non-zero error code
3169  *==========================================================================*/
getJpegSettings(const camera_metadata_t * settings)3170 int QCamera3HardwareInterface::getJpegSettings
3171                                   (const camera_metadata_t *settings)
3172 {
3173     if (mJpegSettings) {
3174         if (mJpegSettings->gps_timestamp) {
3175             free(mJpegSettings->gps_timestamp);
3176             mJpegSettings->gps_timestamp = NULL;
3177         }
3178         for (int i = 0; i < 3; i++) {
3179             free(mJpegSettings->gps_coordinates[i]);
3180             mJpegSettings->gps_coordinates[i] = NULL;
3181         }
3182         free(mJpegSettings);
3183         mJpegSettings = NULL;
3184     }
3185     mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
3186     CameraMetadata jpeg_settings;
3187     jpeg_settings = settings;
3188 
3189     if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3190         mJpegSettings->jpeg_orientation =
3191             jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3192     } else {
3193         mJpegSettings->jpeg_orientation = 0;
3194     }
3195     if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
3196         mJpegSettings->jpeg_quality =
3197             jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
3198     } else {
3199         mJpegSettings->jpeg_quality = 85;
3200     }
3201     if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3202         mJpegSettings->thumbnail_size.width =
3203             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3204         mJpegSettings->thumbnail_size.height =
3205             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3206     } else {
3207         mJpegSettings->thumbnail_size.width = 0;
3208         mJpegSettings->thumbnail_size.height = 0;
3209     }
3210     if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
3211         for (int i = 0; i < 3; i++) {
3212             mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
3213             *(mJpegSettings->gps_coordinates[i]) =
3214                 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
3215         }
3216     } else{
3217        for (int i = 0; i < 3; i++) {
3218             mJpegSettings->gps_coordinates[i] = NULL;
3219         }
3220     }
3221 
3222     if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
3223         mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
3224         *(mJpegSettings->gps_timestamp) =
3225             jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
3226     } else {
3227         mJpegSettings->gps_timestamp = NULL;
3228     }
3229 
3230     if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
3231         int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
3232         for (int i = 0; i < len; i++) {
3233             mJpegSettings->gps_processing_method[i] =
3234                 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
3235         }
3236         if (mJpegSettings->gps_processing_method[len-1] != '\0') {
3237             mJpegSettings->gps_processing_method[len] = '\0';
3238         }
3239     } else {
3240         mJpegSettings->gps_processing_method[0] = '\0';
3241     }
3242 
3243     mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
3244 
3245     mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
3246 
3247     if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
3248         mJpegSettings->lens_focal_length =
3249             jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
3250     }
3251     if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
3252         mJpegSettings->exposure_compensation =
3253             jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
3254     }
3255     mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
3256     mJpegSettings->max_jpeg_size = calcMaxJpegSize();
3257     mJpegSettings->is_jpeg_format = true;
3258     mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
3259     mJpegSettings->f_number = gCamCapability[mCameraId]->apertures[0];
3260 
3261     if (jpeg_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
3262         mJpegSettings->wb =
3263             jpeg_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
3264     } else {
3265         mJpegSettings->wb = 0;
3266     }
3267 
3268     if (jpeg_settings.exists(ANDROID_FLASH_MODE)) {
3269         mJpegSettings->flash =
3270             jpeg_settings.find(ANDROID_FLASH_MODE).data.u8[0];
3271     } else {
3272         mJpegSettings->flash = 0;
3273     }
3274 
3275 
3276     return 0;
3277 }
3278 
3279 /*===========================================================================
3280  * FUNCTION   : captureResultCb
3281  *
3282  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
3283  *
3284  * PARAMETERS :
3285  *   @frame  : frame information from mm-camera-interface
3286  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
3287  *   @userdata: userdata
3288  *
3289  * RETURN     : NONE
3290  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,void * userdata)3291 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
3292                 camera3_stream_buffer_t *buffer,
3293                 uint32_t frame_number, void *userdata)
3294 {
3295     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
3296     if (hw == NULL) {
3297         ALOGE("%s: Invalid hw %p", __func__, hw);
3298         return;
3299     }
3300 
3301     hw->captureResultCb(metadata, buffer, frame_number);
3302     return;
3303 }
3304 
3305 
3306 /*===========================================================================
3307  * FUNCTION   : initialize
3308  *
3309  * DESCRIPTION: Pass framework callback pointers to HAL
3310  *
3311  * PARAMETERS :
3312  *
3313  *
3314  * RETURN     : Success : 0
3315  *              Failure: -ENODEV
3316  *==========================================================================*/
3317 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)3318 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
3319                                   const camera3_callback_ops_t *callback_ops)
3320 {
3321     ALOGV("%s: E", __func__);
3322     QCamera3HardwareInterface *hw =
3323         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3324     if (!hw) {
3325         ALOGE("%s: NULL camera device", __func__);
3326         return -ENODEV;
3327     }
3328 
3329     int rc = hw->initialize(callback_ops);
3330     ALOGV("%s: X", __func__);
3331     return rc;
3332 }
3333 
3334 /*===========================================================================
3335  * FUNCTION   : configure_streams
3336  *
3337  * DESCRIPTION:
3338  *
3339  * PARAMETERS :
3340  *
3341  *
3342  * RETURN     : Success: 0
3343  *              Failure: -EINVAL (if stream configuration is invalid)
3344  *                       -ENODEV (fatal error)
3345  *==========================================================================*/
3346 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)3347 int QCamera3HardwareInterface::configure_streams(
3348         const struct camera3_device *device,
3349         camera3_stream_configuration_t *stream_list)
3350 {
3351     ALOGV("%s: E", __func__);
3352     QCamera3HardwareInterface *hw =
3353         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3354     if (!hw) {
3355         ALOGE("%s: NULL camera device", __func__);
3356         return -ENODEV;
3357     }
3358     int rc = hw->configureStreams(stream_list);
3359     ALOGV("%s: X", __func__);
3360     return rc;
3361 }
3362 
3363 /*===========================================================================
3364  * FUNCTION   : register_stream_buffers
3365  *
3366  * DESCRIPTION: Register stream buffers with the device
3367  *
3368  * PARAMETERS :
3369  *
3370  * RETURN     :
3371  *==========================================================================*/
register_stream_buffers(const struct camera3_device * device,const camera3_stream_buffer_set_t * buffer_set)3372 int QCamera3HardwareInterface::register_stream_buffers(
3373         const struct camera3_device *device,
3374         const camera3_stream_buffer_set_t *buffer_set)
3375 {
3376     ALOGV("%s: E", __func__);
3377     QCamera3HardwareInterface *hw =
3378         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3379     if (!hw) {
3380         ALOGE("%s: NULL camera device", __func__);
3381         return -ENODEV;
3382     }
3383     int rc = hw->registerStreamBuffers(buffer_set);
3384     ALOGV("%s: X", __func__);
3385     return rc;
3386 }
3387 
3388 /*===========================================================================
3389  * FUNCTION   : construct_default_request_settings
3390  *
3391  * DESCRIPTION: Configure a settings buffer to meet the required use case
3392  *
3393  * PARAMETERS :
3394  *
3395  *
3396  * RETURN     : Success: Return valid metadata
3397  *              Failure: Return NULL
3398  *==========================================================================*/
3399 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)3400     construct_default_request_settings(const struct camera3_device *device,
3401                                         int type)
3402 {
3403 
3404     ALOGV("%s: E", __func__);
3405     camera_metadata_t* fwk_metadata = NULL;
3406     QCamera3HardwareInterface *hw =
3407         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3408     if (!hw) {
3409         ALOGE("%s: NULL camera device", __func__);
3410         return NULL;
3411     }
3412 
3413     fwk_metadata = hw->translateCapabilityToMetadata(type);
3414 
3415     ALOGV("%s: X", __func__);
3416     return fwk_metadata;
3417 }
3418 
3419 /*===========================================================================
3420  * FUNCTION   : process_capture_request
3421  *
3422  * DESCRIPTION:
3423  *
3424  * PARAMETERS :
3425  *
3426  *
3427  * RETURN     :
3428  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)3429 int QCamera3HardwareInterface::process_capture_request(
3430                     const struct camera3_device *device,
3431                     camera3_capture_request_t *request)
3432 {
3433     ALOGV("%s: E", __func__);
3434     QCamera3HardwareInterface *hw =
3435         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3436     if (!hw) {
3437         ALOGE("%s: NULL camera device", __func__);
3438         return -EINVAL;
3439     }
3440 
3441     int rc = hw->processCaptureRequest(request);
3442     ALOGV("%s: X", __func__);
3443     return rc;
3444 }
3445 
3446 /*===========================================================================
3447  * FUNCTION   : get_metadata_vendor_tag_ops
3448  *
3449  * DESCRIPTION:
3450  *
3451  * PARAMETERS :
3452  *
3453  *
3454  * RETURN     :
3455  *==========================================================================*/
3456 
get_metadata_vendor_tag_ops(const struct camera3_device * device,vendor_tag_query_ops_t * ops)3457 void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
3458                 const struct camera3_device *device,
3459                 vendor_tag_query_ops_t* ops)
3460 {
3461     ALOGV("%s: E", __func__);
3462     QCamera3HardwareInterface *hw =
3463         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3464     if (!hw) {
3465         ALOGE("%s: NULL camera device", __func__);
3466         return;
3467     }
3468 
3469     hw->getMetadataVendorTagOps(ops);
3470     ALOGV("%s: X", __func__);
3471     return;
3472 }
3473 
3474 /*===========================================================================
3475  * FUNCTION   : dump
3476  *
3477  * DESCRIPTION:
3478  *
3479  * PARAMETERS :
3480  *
3481  *
3482  * RETURN     :
3483  *==========================================================================*/
3484 
dump(const struct camera3_device * device,int fd)3485 void QCamera3HardwareInterface::dump(
3486                 const struct camera3_device *device, int fd)
3487 {
3488     ALOGV("%s: E", __func__);
3489     QCamera3HardwareInterface *hw =
3490         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
3491     if (!hw) {
3492         ALOGE("%s: NULL camera device", __func__);
3493         return;
3494     }
3495 
3496     hw->dump(fd);
3497     ALOGV("%s: X", __func__);
3498     return;
3499 }
3500 
3501 /*===========================================================================
3502  * FUNCTION   : close_camera_device
3503  *
3504  * DESCRIPTION:
3505  *
3506  * PARAMETERS :
3507  *
3508  *
3509  * RETURN     :
3510  *==========================================================================*/
close_camera_device(struct hw_device_t * device)3511 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
3512 {
3513     ALOGV("%s: E", __func__);
3514     int ret = NO_ERROR;
3515     QCamera3HardwareInterface *hw =
3516         reinterpret_cast<QCamera3HardwareInterface *>(
3517             reinterpret_cast<camera3_device_t *>(device)->priv);
3518     if (!hw) {
3519         ALOGE("NULL camera device");
3520         return BAD_VALUE;
3521     }
3522     delete hw;
3523 
3524     pthread_mutex_lock(&mCameraSessionLock);
3525     mCameraSessionActive = 0;
3526     pthread_mutex_unlock(&mCameraSessionLock);
3527     ALOGV("%s: X", __func__);
3528     return ret;
3529 }
3530 
3531 /*===========================================================================
3532  * FUNCTION   : getWaveletDenoiseProcessPlate
3533  *
3534  * DESCRIPTION: query wavelet denoise process plate
3535  *
3536  * PARAMETERS : None
3537  *
3538  * RETURN     : WNR process plate value
3539  *==========================================================================*/
getWaveletDenoiseProcessPlate()3540 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
3541 {
3542     char prop[PROPERTY_VALUE_MAX];
3543     memset(prop, 0, sizeof(prop));
3544     property_get("persist.denoise.process.plates", prop, "0");
3545     int processPlate = atoi(prop);
3546     switch(processPlate) {
3547     case 0:
3548         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
3549     case 1:
3550         return CAM_WAVELET_DENOISE_CBCR_ONLY;
3551     case 2:
3552         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3553     case 3:
3554         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
3555     default:
3556         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
3557     }
3558 }
3559 
3560 /*===========================================================================
3561  * FUNCTION   : needRotationReprocess
3562  *
3563  * DESCRIPTION: if rotation needs to be done by reprocess in pp
3564  *
3565  * PARAMETERS : none
3566  *
3567  * RETURN     : true: needed
3568  *              false: no need
3569  *==========================================================================*/
needRotationReprocess()3570 bool QCamera3HardwareInterface::needRotationReprocess()
3571 {
3572 
3573     if (!mJpegSettings->is_jpeg_format) {
3574         // RAW image, no need to reprocess
3575         return false;
3576     }
3577 
3578     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
3579         mJpegSettings->jpeg_orientation > 0) {
3580         // current rotation is not zero, and pp has the capability to process rotation
3581         ALOGD("%s: need do reprocess for rotation", __func__);
3582         return true;
3583     }
3584 
3585     return false;
3586 }
3587 
3588 /*===========================================================================
3589  * FUNCTION   : needReprocess
3590  *
3591  * DESCRIPTION: if reprocess is needed
3592  *
3593  * PARAMETERS : none
3594  *
3595  * RETURN     : true: needed
3596  *              false: no need
3597  *==========================================================================*/
needReprocess()3598 bool QCamera3HardwareInterface::needReprocess()
3599 {
3600     if (!mJpegSettings->is_jpeg_format) {
3601         // RAW image, no need to reprocess
3602         return false;
3603     }
3604 
3605     if ((mJpegSettings->min_required_pp_mask > 0) ||
3606          isWNREnabled()) {
3607         // TODO: add for ZSL HDR later
3608         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
3609         ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
3610         return true;
3611     }
3612     return needRotationReprocess();
3613 }
3614 
3615 /*===========================================================================
3616  * FUNCTION   : addOnlineReprocChannel
3617  *
3618  * DESCRIPTION: add a online reprocess channel that will do reprocess on frames
3619  *              coming from input channel
3620  *
3621  * PARAMETERS :
3622  *   @pInputChannel : ptr to input channel whose frames will be post-processed
3623  *
3624  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
3625  *==========================================================================*/
addOnlineReprocChannel(QCamera3Channel * pInputChannel,QCamera3PicChannel * picChHandle)3626 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
3627                                                       QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
3628 {
3629     int32_t rc = NO_ERROR;
3630     QCamera3ReprocessChannel *pChannel = NULL;
3631     if (pInputChannel == NULL) {
3632         ALOGE("%s: input channel obj is NULL", __func__);
3633         return NULL;
3634     }
3635 
3636     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
3637             mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
3638     if (NULL == pChannel) {
3639         ALOGE("%s: no mem for reprocess channel", __func__);
3640         return NULL;
3641     }
3642 
3643     // Capture channel, only need snapshot and postview streams start together
3644     mm_camera_channel_attr_t attr;
3645     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
3646     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
3647     attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
3648     rc = pChannel->initialize();
3649     if (rc != NO_ERROR) {
3650         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
3651         delete pChannel;
3652         return NULL;
3653     }
3654 
3655     // pp feature config
3656     cam_pp_feature_config_t pp_config;
3657     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
3658     if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
3659         pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
3660         pp_config.sharpness = 10;
3661     }
3662 
3663     if (isWNREnabled()) {
3664         pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
3665         pp_config.denoise2d.denoise_enable = 1;
3666         pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
3667     }
3668     if (needRotationReprocess()) {
3669         pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
3670         int rotation = mJpegSettings->jpeg_orientation;
3671         if (rotation == 0) {
3672             pp_config.rotation = ROTATE_0;
3673         } else if (rotation == 90) {
3674             pp_config.rotation = ROTATE_90;
3675         } else if (rotation == 180) {
3676             pp_config.rotation = ROTATE_180;
3677         } else if (rotation == 270) {
3678             pp_config.rotation = ROTATE_270;
3679         }
3680     }
3681 
3682    rc = pChannel->addReprocStreamsFromSource(pp_config,
3683                                              pInputChannel,
3684                                              mMetadataChannel);
3685 
3686     if (rc != NO_ERROR) {
3687         delete pChannel;
3688         return NULL;
3689     }
3690     return pChannel;
3691 }
3692 
getMaxUnmatchedFramesInQueue()3693 int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
3694 {
3695     return gCamCapability[mCameraId]->min_num_pp_bufs;
3696 }
3697 
isWNREnabled()3698 bool QCamera3HardwareInterface::isWNREnabled() {
3699     return gCamCapability[mCameraId]->isWnrSupported;
3700 }
3701 
3702 }; //end namespace qcamera
3703