1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera2HWI"
31 
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
37 #include STAT_H
38 #include <utils/Errors.h>
39 
40 // Camera dependencies
41 #include "QCamera2HWI.h"
42 #include "QCameraTrace.h"
43 
44 extern "C" {
45 #include "mm_camera_dbg.h"
46 }
47 
48 namespace qcamera {
49 
50 /*===========================================================================
51  * FUNCTION   : zsl_channel_cb
52  *
53  * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
54  *              mm-camera-interface
55  *
56  * PARAMETERS :
57  *   @recvd_frame : received super buffer
58  *   @userdata    : user data ptr
59  *
60  * RETURN    : None
61  *
62  * NOTE      : recvd_frame will be released by the caller after this call, so
63  *             if asynchronous handling of recvd_frame is needed, it is our
64  *             responsibility to save a copy of it for later use.
65  *==========================================================================*/
66 void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
67                                                void *userdata)
68 {
69     ATRACE_CALL();
70     LOGH("[KPI Perf]: E");
71     char value[PROPERTY_VALUE_MAX];
72     bool dump_raw = false;
73     bool log_matching = false;
74     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
75     if (pme == NULL ||
76         pme->mCameraHandle == NULL ||
77         pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
78        LOGE("camera obj not valid");
79        return;
80     }
81 
82     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
83     if (pChannel == NULL ||
84         pChannel->getMyHandle() != recvd_frame->ch_id) {
85         LOGE("ZSL channel doesn't exist, return here");
86         return;
87     }
88 
89     if(pme->mParameters.isSceneSelectionEnabled() &&
90             !pme->m_stateMachine.isCaptureRunning()) {
91         pme->selectScene(pChannel, recvd_frame);
92         pChannel->bufDone(recvd_frame);
93         return;
94     }
95 
96     LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
97            recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
98     if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
99         qcamera_sm_internal_evt_payload_t *payload =
100                 (qcamera_sm_internal_evt_payload_t *)malloc(
101                         sizeof(qcamera_sm_internal_evt_payload_t));
102         if (NULL != payload) {
103             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
104             payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
105             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
106             if (rc != NO_ERROR) {
107                 LOGE("processEvt for retro AEC unlock failed");
108                 free(payload);
109                 payload = NULL;
110             }
111         } else {
112             LOGE("No memory for retro AEC event");
113         }
114     }
115 
116     // Check if retro-active frames are completed and camera is
117     // ready to go ahead with LED estimation for regular frames
118     if (recvd_frame->bReadyForPrepareSnapshot) {
119         // Send an event
120         LOGD("Ready for Prepare Snapshot, signal ");
121         qcamera_sm_internal_evt_payload_t *payload =
122                     (qcamera_sm_internal_evt_payload_t *)malloc(
123                     sizeof(qcamera_sm_internal_evt_payload_t));
124         if (NULL != payload) {
125             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
126             payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
127             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
128             if (rc != NO_ERROR) {
129                 LOGW("processEvt Ready for Snapshot failed");
130                 free(payload);
131                 payload = NULL;
132             }
133         } else {
134             LOGE("No memory for prepare snapshot signal event"
135                     " qcamera_sm_internal_evt_payload_t");
136         }
137     }
138 
139     /* indicate to the parent that capture is done */
140     pme->captureDone();
141 
142     // save a copy of the superbuf
143     mm_camera_super_buf_t* frame =
144                (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
145     if (frame == NULL) {
146         LOGE("Error allocating memory to save received_frame structure.");
147         pChannel->bufDone(recvd_frame);
148         return;
149     }
150     *frame = *recvd_frame;
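    // Note: this is a shallow copy. Only the superbuf descriptor and its buffer
    // pointers are duplicated; the underlying stream buffers remain owned by the
    // channel and still have to be returned to it via bufDone().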
151 
152     if (recvd_frame->num_bufs > 0) {
153         LOGI("[KPI Perf]: superbuf frame_idx %d",
154             recvd_frame->bufs[0]->frame_idx);
155     }
156 
157     // DUMP RAW if available
158     property_get("persist.camera.zsl_raw", value, "0");
159     dump_raw = atoi(value) > 0 ? true : false;
160     if (dump_raw) {
161         for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
162             if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
163                 mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
164                 QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
165                 if (NULL != pStream) {
166                     pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
167                 }
168                 break;
169             }
170         }
171     }
172 
173     for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
174         if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
175             mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
176             QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
177             if (NULL != pStream) {
178                 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
179             }
180             break;
181         }
182     }
183     //
184     // Check whether FD metadata is needed along with the snapshot frame in ZSL mode
185     if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
186         //Need Face Detection result for snapshot frames
187         //Get the Meta Data frames
188         mm_camera_buf_def_t *pMetaFrame = NULL;
189         for (uint32_t i = 0; i < frame->num_bufs; i++) {
190             QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
191             if (pStream != NULL) {
192                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
193                     pMetaFrame = frame->bufs[i]; //find the metadata
194                     break;
195                 }
196             }
197         }
198 
199         if(pMetaFrame != NULL){
200             metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
201             //send the face detection info
202             cam_faces_data_t faces_data;
203             pme->fillFacesData(faces_data, pMetaData);
204             // Hard code this here until MCT can support it
205             faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;
206 
207             qcamera_sm_internal_evt_payload_t *payload =
208                 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
209             if (NULL != payload) {
210                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
211                 payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
212                 payload->faces_data = faces_data;
213                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
214                 if (rc != NO_ERROR) {
215                     LOGW("processEvt face_detection_result failed");
216                     free(payload);
217                     payload = NULL;
218                 }
219             } else {
220                 LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
221             }
222         }
223     }
224 
225     property_get("persist.camera.dumpmetadata", value, "0");
226     int32_t enabled = atoi(value);
227     if (enabled) {
228         mm_camera_buf_def_t *pMetaFrame = NULL;
229         QCameraStream *pStream = NULL;
230         for (uint32_t i = 0; i < frame->num_bufs; i++) {
231             pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
232             if (pStream != NULL) {
233                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
234                     pMetaFrame = frame->bufs[i];
235                     if (pMetaFrame != NULL &&
236                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
237                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
238                     }
239                     break;
240                 }
241             }
242         }
243     }
244 
245     property_get("persist.camera.zsl_matching", value, "0");
246     log_matching = atoi(value) > 0 ? true : false;
247     if (log_matching) {
248         LOGH("ZSL super buffer contains:");
249         QCameraStream *pStream = NULL;
250         for (uint32_t i = 0; i < frame->num_bufs; i++) {
251             pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
252             if (pStream != NULL ) {
253                 LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
254                         frame->bufs[i]->buf_idx,
255                         frame->bufs[i]->frame_idx,
256                         pStream->getMyType(),
257                         frame->bufs[i]->ts.tv_sec,
258                         frame->bufs[i]->ts.tv_nsec);
259             }
260         }
261     }
262 
263     // Wait on Postproc initialization if needed
264     // then send to postprocessor
265     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
266             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
267         LOGE("Failed to trigger process data");
268         pChannel->bufDone(recvd_frame);
269         free(frame);
270         frame = NULL;
271         return;
272     }
273 
274     LOGH("[KPI Perf]: X");
275 }
276 
277 /*===========================================================================
278  * FUNCTION   : selectScene
279  *
280  * DESCRIPTION: send a preview callback when a specific selected scene is applied
281  *
282  * PARAMETERS :
283  *   @pChannel: Camera channel
284  *   @frame   : Bundled super buffer
285  *
286  * RETURN     : int32_t type of status
287  *              NO_ERROR  -- success
288  *              non-zero failure code
289  *==========================================================================*/
290 int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
291         mm_camera_super_buf_t *frame)
292 {
293     mm_camera_buf_def_t *pMetaFrame = NULL;
294     QCameraStream *pStream = NULL;
295     int32_t rc = NO_ERROR;
296 
297     if ((NULL == frame) || (NULL == pChannel)) {
298         LOGE("Invalid scene select input");
299         return BAD_VALUE;
300     }
301 
302     cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
303     if (CAM_SCENE_MODE_MAX == selectedScene) {
304         LOGL("No selected scene");
305         return NO_ERROR;
306     }
307 
308     for (uint32_t i = 0; i < frame->num_bufs; i++) {
309         pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
310         if (pStream != NULL) {
311             if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
312                 pMetaFrame = frame->bufs[i];
313                 break;
314             }
315         }
316     }
317 
318     if (NULL == pMetaFrame) {
319         LOGE("No metadata buffer found in scene select super buffer");
320         return NO_INIT;
321     }
322 
323     metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
324 
325     IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
326         if ((*scene == selectedScene) &&
327                 (mDataCb != NULL) &&
328                 (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
329             mm_camera_buf_def_t *preview_frame = NULL;
330             for (uint32_t i = 0; i < frame->num_bufs; i++) {
331                 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
332                 if (pStream != NULL) {
333                     if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
334                         preview_frame = frame->bufs[i];
335                         break;
336                     }
337                 }
338             }
339             if (preview_frame) {
340                 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
341                 uint32_t idx = preview_frame->buf_idx;
342                 rc = sendPreviewCallback(pStream, memory, idx);
343                 if (NO_ERROR != rc) {
344                     LOGE("Error triggering scene select preview callback");
345                 } else {
346                     mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
347                 }
348             } else {
349                 LOGE("No preview buffer found in scene select super buffer");
350                 return NO_INIT;
351             }
352         }
353     } else {
354         LOGE("No current scene metadata!");
355         rc = NO_INIT;
356     }
357 
358     return rc;
359 }
360 
361 /*===========================================================================
362  * FUNCTION   : capture_channel_cb_routine
363  *
364  * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
365  *              mm-camera-interface
366  *
367  * PARAMETERS :
368  *   @recvd_frame : received super buffer
369  *   @userdata    : user data ptr
370  *
371  * RETURN    : None
372  *
373  * NOTE      : recvd_frame will be released by the caller after this call, so
374  *             if asynchronous handling of recvd_frame is needed, it is our
375  *             responsibility to save a copy of it for later use.
376  *==========================================================================*/
377 void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
378                                                            void *userdata)
379 {
380     KPI_ATRACE_CALL();
381     char value[PROPERTY_VALUE_MAX];
382     LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
383     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
384     if (pme == NULL ||
385         pme->mCameraHandle == NULL ||
386         pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
387         LOGE("camera obj not valid");
388         return;
389     }
390 
391     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
392     if (pChannel == NULL ||
393         pChannel->getMyHandle() != recvd_frame->ch_id) {
394         LOGE("Capture channel doesn't exist, return here");
395         return;
396     }
397 
398     // save a copy of the superbuf
399     mm_camera_super_buf_t* frame =
400                (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
401     if (frame == NULL) {
402         LOGE("Error allocating memory to save received_frame structure.");
403         pChannel->bufDone(recvd_frame);
404         return;
405     }
406     *frame = *recvd_frame;
407 
408     if (recvd_frame->num_bufs > 0) {
409         LOGI("[KPI Perf]: superbuf frame_idx %d",
410                 recvd_frame->bufs[0]->frame_idx);
411     }
412 
413     for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) {
414         if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) {
415             mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
416             QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
417             if ( NULL != pStream ) {
418                 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
419             }
420             break;
421         }
422     }
423 
424     property_get("persist.camera.dumpmetadata", value, "0");
425     int32_t enabled = atoi(value);
426     if (enabled) {
427         mm_camera_buf_def_t *pMetaFrame = NULL;
428         QCameraStream *pStream = NULL;
429         for (uint32_t i = 0; i < frame->num_bufs; i++) {
430             pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
431             if (pStream != NULL) {
432                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
433                     pMetaFrame = frame->bufs[i]; //find the metadata
434                     if (pMetaFrame != NULL &&
435                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
436                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
437                     }
438                     break;
439                 }
440             }
441         }
442     }
443 
444     // Wait on Postproc initialization if needed
445     // then send to postprocessor
446     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
447             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
448         LOGE("Failed to trigger process data");
449         pChannel->bufDone(recvd_frame);
450         free(frame);
451         frame = NULL;
452         return;
453     }
454 
455 /* START of test register face image for face authentication */
456 #ifdef QCOM_TEST_FACE_REGISTER_FACE
457     static uint8_t bRunFaceReg = 1;
458 
459     if (bRunFaceReg > 0) {
460         // find snapshot frame
461         QCameraStream *main_stream = NULL;
462         mm_camera_buf_def_t *main_frame = NULL;
463         for (int i = 0; i < recvd_frame->num_bufs; i++) {
464             QCameraStream *pStream =
465                 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
466             if (pStream != NULL) {
467                 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
468                     main_stream = pStream;
469                     main_frame = recvd_frame->bufs[i];
470                     break;
471                 }
472             }
473         }
474         if (main_stream != NULL && main_frame != NULL) {
475             int32_t faceId = -1;
476             cam_pp_offline_src_config_t config;
477             memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
478             config.num_of_bufs = 1;
479             main_stream->getFormat(config.input_fmt);
480             main_stream->getFrameDimension(config.input_dim);
481             main_stream->getFrameOffset(config.input_buf_planes.plane_info);
482             LOGH("DEBUG: registerFaceImage E");
483             int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
484             LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
485             bRunFaceReg = 0;
486         }
487     }
488 
489 #endif
490 /* END of test register face image for face authentication */
491 
492     LOGH("[KPI Perf]: X");
493 }
494 #ifdef TARGET_TS_MAKEUP
495 bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,
496         QCameraStream * pStream) {
497     LOGD("begin");
498     bool bRet = false;
499     if (pStream == NULL || pFrame == NULL) {
500         bRet = false;
501         LOGH("pStream == NULL || pFrame == NULL");
502     } else {
503         bRet = TsMakeupProcess(pFrame, pStream, mFaceRect);
504     }
505     LOGD("end bRet = %d ",bRet);
506     return bRet;
507 }
508 
509 bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,
510         QCameraStream * pStream) {
511     LOGD("begin");
512     bool bRet = false;
513     if (pStream == NULL || pFrame == NULL) {
514         bRet = false;
515         LOGH("pStream == NULL || pFrame == NULL");
516     } else {
517         cam_frame_len_offset_t offset;
518         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
519         pStream->getFrameOffset(offset);
520 
521         cam_dimension_t dim;
522         pStream->getFrameDimension(dim);
523 
524         unsigned char *yBuf  = (unsigned char*)pFrame->buffer;
525         unsigned char *uvBuf = yBuf + offset.mp[0].len;
526         TSMakeupDataEx inMakeupData;
527         inMakeupData.frameWidth  = dim.width;
528         inMakeupData.frameHeight = dim.height;
529         inMakeupData.yBuf  = yBuf;
530         inMakeupData.uvBuf = uvBuf;
531         inMakeupData.yStride  = offset.mp[0].stride;
532         inMakeupData.uvStride = offset.mp[1].stride;
533         LOGD("detect begin");
534         TSHandle fd_handle = ts_detectface_create_context();
535         if (fd_handle != NULL) {
536             cam_format_t fmt;
537             pStream->getFormat(fmt);
538             int iret = ts_detectface_detectEx(fd_handle, &inMakeupData);
539             LOGD("ts_detectface_detect iret = %d",iret);
540             if (iret <= 0) {
541                 bRet = false;
542             } else {
543                 TSRect faceRect;
544                 memset(&faceRect,-1,sizeof(TSRect));
545                 iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL);
546                 LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld,"
547                         "faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld"
548                         ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom);
549                 bRet = TsMakeupProcess(pFrame,pStream,faceRect);
550             }
551             ts_detectface_destroy_context(&fd_handle);
552             fd_handle = NULL;
553         } else {
554             LOGH("fd_handle == NULL");
555         }
556         LOGD("detect end");
557     }
558     LOGD("end bRet = %d ",bRet);
559     return bRet;
560 }
561 
562 bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame,
563         QCameraStream * pStream,TSRect& faceRect) {
564     bool bRet = false;
565     LOGD("begin");
566     if (pStream == NULL || pFrame == NULL) {
567         LOGH("pStream == NULL || pFrame == NULL ");
568         return false;
569     }
570 
571     int whiteLevel, cleanLevel;
572     bool enableMakeup = (faceRect.left > -1) &&
573             (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel));
574     if (enableMakeup) {
575         cam_dimension_t dim;
576         cam_frame_len_offset_t offset;
577         pStream->getFrameDimension(dim);
578         pStream->getFrameOffset(offset);
579         unsigned char *tempOriBuf = NULL;
580 
581         tempOriBuf = (unsigned char*)pFrame->buffer;
582         unsigned char *yBuf = tempOriBuf;
583         unsigned char *uvBuf = tempOriBuf + offset.mp[0].len;
584         unsigned char *tmpBuf = new (std::nothrow) unsigned char[offset.frame_len]; // nothrow so the NULL check below is meaningful
585         if (tmpBuf == NULL) {
586             LOGH("tmpBuf == NULL ");
587             return false;
588         }
589         TSMakeupDataEx inMakeupData, outMakeupData;
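        // Clamp the makeup levels to the [0, 100] range before handing them to the
        // makeup library.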
590         whiteLevel =  whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
591         cleanLevel =  cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
592         inMakeupData.frameWidth = dim.width;  // NV21 Frame width  > 0
593         inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
594         inMakeupData.yBuf =  yBuf; //  Y buffer pointer
595         inMakeupData.uvBuf = uvBuf; // VU buffer pointer
596         inMakeupData.yStride  = offset.mp[0].stride;
597         inMakeupData.uvStride = offset.mp[1].stride;
598         outMakeupData.frameWidth = dim.width; // NV21 Frame width  > 0
599         outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
600         outMakeupData.yBuf =  tmpBuf; //  Y buffer pointer
601         outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer
602         outMakeupData.yStride  = offset.mp[0].stride;
603         outMakeupData.uvStride = offset.mp[1].stride;
604         LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d",
605             faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel);
606         ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel);
607         memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len);
608         QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info;
609         memory->cleanCache(pFrame->buf_idx);
610         if (tmpBuf != NULL) {
611             delete[] tmpBuf;
612             tmpBuf = NULL;
613         }
614     }
615     LOGD("end bRet = %d ",bRet);
616     return bRet;
617 }
618 #endif
619 /*===========================================================================
620  * FUNCTION   : postproc_channel_cb_routine
621  *
622  * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
623  *              mm-camera-interface
624  *
625  * PARAMETERS :
626  *   @recvd_frame : received super buffer
627  *   @userdata    : user data ptr
628  *
629  * RETURN    : None
630  *
631  * NOTE      : recvd_frame will be released by the caller after this call, so
632  *             if asynchronous handling of recvd_frame is needed, it is our
633  *             responsibility to save a copy of it for later use.
634  *==========================================================================*/
635 void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
636                                                             void *userdata)
637 {
638     ATRACE_CALL();
639     LOGH("[KPI Perf]: E");
640     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
641     if (pme == NULL ||
642         pme->mCameraHandle == NULL ||
643         pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
644         LOGE("camera obj not valid");
645         return;
646     }
647 
648     // save a copy of the superbuf
649     mm_camera_super_buf_t* frame =
650                (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
651     if (frame == NULL) {
652         LOGE("Error allocating memory to save received_frame structure.");
653         return;
654     }
655     *frame = *recvd_frame;
656 
657     if (recvd_frame->num_bufs > 0) {
658         LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx);
659     }
660     // Wait on JPEG create session
661     pme->waitDeferredWork(pme->mJpegJob);
662 
663     // send to postprocessor
664     pme->m_postprocessor.processPPData(frame);
665 
666     ATRACE_INT("Camera:Reprocess", 0);
667     LOGH("[KPI Perf]: X");
668 }
669 
670 /*===========================================================================
671  * FUNCTION   : synchronous_stream_cb_routine
672  *
673  * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS
674  *
675  * PARAMETERS :
676  *   @super_frame : received super buffer
677  *   @stream      : stream object
678  *   @userdata    : user data ptr
679  *
680  * RETURN    : None
681  *
682  * NOTE      : This function is executed in the mm-interface context.
683  *             Avoid adding latency on this thread.
684  *==========================================================================*/
685 void QCamera2HardwareInterface::synchronous_stream_cb_routine(
686         mm_camera_super_buf_t *super_frame, QCameraStream * stream,
687         void *userdata)
688 {
689     nsecs_t frameTime = 0, mPreviewTimestamp = 0;
690     int err = NO_ERROR;
691 
692     ATRACE_CALL();
693     LOGH("[KPI Perf] : BEGIN");
694     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
695     QCameraGrallocMemory *memory = NULL;
696 
697     if (pme == NULL) {
698         LOGE("Invalid hardware object");
699         return;
700     }
701     if (super_frame == NULL) {
702         LOGE("Invalid super buffer");
703         return;
704     }
705     mm_camera_buf_def_t *frame = super_frame->bufs[0];
706     if (NULL == frame) {
707         LOGE("Frame is NULL");
708         return;
709     }
710 
711     if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) {
712         LOGE("This is only for PREVIEW stream for now");
713         return;
714     }
715 
716     if(pme->m_bPreviewStarted) {
717         LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
718         pme->m_bPreviewStarted = false;
719     }
720 
721     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
722         pthread_mutex_lock(&pme->mGrallocLock);
723         pme->mLastPreviewFrameID = frame->frame_idx;
724         pthread_mutex_unlock(&pme->mGrallocLock);
725         LOGH("preview is not running, no need to process");
726         return;
727     }
728 
729     frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
730     // Calculate the future presentation time stamp for displaying frames at regular interval
731     mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime);
732     stream->mStreamTimestamp = frameTime;
733     memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
734 
735 #ifdef TARGET_TS_MAKEUP
736     pme->TsMakeupProcess_Preview(frame,stream);
737 #endif
738 
739     // Enqueue buffer to gralloc.
740     uint32_t idx = frame->buf_idx;
741     LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld",
742             pme, idx, frameTime, mPreviewTimestamp);
743     err = memory->enqueueBuffer(idx, mPreviewTimestamp);
744 
745     if (err == NO_ERROR) {
746         pthread_mutex_lock(&pme->mGrallocLock);
747         pme->mLastPreviewFrameID = frame->frame_idx;
748         pme->mEnqueuedBuffers++;
749         pthread_mutex_unlock(&pme->mGrallocLock);
750     } else {
751         LOGE("Enqueue Buffer failed");
752     }
753 
754     LOGH("[KPI Perf] : END");
755     return;
756 }
757 
758 /*===========================================================================
759  * FUNCTION   : preview_stream_cb_routine
760  *
761  * DESCRIPTION: helper function to handle preview frame from preview stream in
762  *              normal case with display.
763  *
764  * PARAMETERS :
765  *   @super_frame : received super buffer
766  *   @stream      : stream object
767  *   @userdata    : user data ptr
768  *
769  * RETURN    : None
770  *
771  * NOTE      : the caller passes ownership of super_frame; it's our
772  *             responsibility to free super_frame once we are done with it.
773  *             The new preview frame will be sent to the display, and an older
774  *             frame will be dequeued from the display and returned to the
775  *             kernel for future use.
776  *==========================================================================*/
777 void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
778                                                           QCameraStream * stream,
779                                                           void *userdata)
780 {
781     KPI_ATRACE_CALL();
782     LOGH("[KPI Perf] : BEGIN");
783     int err = NO_ERROR;
784     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
785     QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
786     uint8_t dequeueCnt = 0;
787 
788     if (pme == NULL) {
789         LOGE("Invalid hardware object");
790         free(super_frame);
791         return;
792     }
793     if (memory == NULL) {
794         LOGE("Invalid memory object");
795         free(super_frame);
796         return;
797     }
798 
799     mm_camera_buf_def_t *frame = super_frame->bufs[0];
800     if (NULL == frame) {
801         LOGE("preview frame is NULL");
802         free(super_frame);
803         return;
804     }
805 
806     // For instant capture and for instant AEC, keep track of the frame counter.
807     // This count will be used to check against the corresponding bound values.
808     if (pme->mParameters.isInstantAECEnabled() ||
809             pme->mParameters.isInstantCaptureEnabled()) {
810         pme->mInstantAecFrameCount++;
811     }
812 
813     pthread_mutex_lock(&pme->mGrallocLock);
814     if (!stream->isSyncCBEnabled()) {
815         pme->mLastPreviewFrameID = frame->frame_idx;
816     }
817     if (((!stream->isSyncCBEnabled()) &&
818             (!pme->needProcessPreviewFrame(frame->frame_idx))) ||
819             ((stream->isSyncCBEnabled()) &&
820             (memory->isBufOwnedByCamera(frame->buf_idx)))) {
821         // If the buffer is still owned by the camera, it has not been enqueued to display.
822         // bufDone it back to the backend.
823         pthread_mutex_unlock(&pme->mGrallocLock);
824         LOGH("preview is not running, no need to process");
825         stream->bufDone(frame->buf_idx);
826         free(super_frame);
827         return;
828     } else {
829         pthread_mutex_unlock(&pme->mGrallocLock);
830     }
831 
832     if (pme->needDebugFps()) {
833         pme->debugShowPreviewFPS();
834     }
835 
836     uint32_t idx = frame->buf_idx;
837 
838     pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
839 
840     if(pme->m_bPreviewStarted) {
841        LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
842        pme->m_bPreviewStarted = false ;
843     }
844 
845     if (!stream->isSyncCBEnabled()) {
846         LOGD("Enqueue Buffer to display %d", idx);
847 #ifdef TARGET_TS_MAKEUP
848         pme->TsMakeupProcess_Preview(frame,stream);
849 #endif
850         err = memory->enqueueBuffer(idx);
851 
852         if (err == NO_ERROR) {
853             pthread_mutex_lock(&pme->mGrallocLock);
854             pme->mEnqueuedBuffers++;
855             dequeueCnt = pme->mEnqueuedBuffers;
856             pthread_mutex_unlock(&pme->mGrallocLock);
857         } else {
858             LOGE("Enqueue Buffer failed");
859         }
860     } else {
861         pthread_mutex_lock(&pme->mGrallocLock);
862         dequeueCnt = pme->mEnqueuedBuffers;
863         pthread_mutex_unlock(&pme->mGrallocLock);
864     }
865 
866     // Display the buffer.
867     LOGD("%p displayBuffer %d E", pme, idx);
868     uint8_t numMapped = memory->getMappable();
869 
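    // Dequeue up to dequeueCnt buffers back from the display, map any buffer index
    // not yet known to the backend, and return each dequeued buffer to the driver
    // via bufDone().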
870     for (uint8_t i = 0; i < dequeueCnt; i++) {
871         int dequeuedIdx = memory->dequeueBuffer();
872         if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
873             LOGE("Invalid dequeued buffer index %d from display",
874                    dequeuedIdx);
875             break;
876         } else {
877             pthread_mutex_lock(&pme->mGrallocLock);
878             pme->mEnqueuedBuffers--;
879             pthread_mutex_unlock(&pme->mGrallocLock);
880             if (dequeuedIdx >= numMapped) {
881                 // This buffer has not yet been mapped to the backend
882                 err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
883                 if (memory->checkIfAllBuffersMapped()) {
884                     // check if mapping is done for all the buffers
885                     // Signal the condition for create jpeg session
886                     Mutex::Autolock l(pme->mMapLock);
887                     pme->mMapCond.signal();
888                     LOGH("Mapping done for all bufs");
889                 } else {
890                     LOGH("All buffers are not yet mapped");
891                 }
892             }
893         }
894 
895         if (err < 0) {
896             LOGE("buffer mapping failed %d", err);
897         } else {
898             // Return dequeued buffer back to driver
899             err = stream->bufDone((uint32_t)dequeuedIdx);
900             if ( err < 0) {
901                 LOGW("stream bufDone failed %d", err);
902             }
903         }
904     }
905 
906     // Handle preview data callback
907     if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
908         if (pme->needSendPreviewCallback() &&
909                 (!pme->mParameters.isSceneSelectionEnabled())) {
910             int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
911             if (NO_ERROR != rc) {
912                 LOGW("Preview callback was not sent successfully");
913             }
914         }
915     }
916 
917     free(super_frame);
918     LOGH("[KPI Perf] : END");
919     return;
920 }
921 
922 /*===========================================================================
923  * FUNCTION   : sendPreviewCallback
924  *
925  * DESCRIPTION: helper function for triggering preview callbacks
926  *
927  * PARAMETERS :
928  *   @stream    : stream object
929  *   @memory    : Stream memory allocator
930  *   @idx       : buffer index
931  *
932  * RETURN     : int32_t type of status
933  *              NO_ERROR  -- success
934  *              non-zero failure code
935  *==========================================================================*/
936 int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
937         QCameraMemory *memory, uint32_t idx)
938 {
939     camera_memory_t *previewMem = NULL;
940     camera_memory_t *data = NULL;
941     camera_memory_t *dataToApp = NULL;
942     size_t previewBufSize = 0;
943     size_t previewBufSizeFromCallback = 0;
944     cam_dimension_t preview_dim;
945     cam_format_t previewFmt;
946     int32_t rc = NO_ERROR;
947     int32_t yStride = 0;
948     int32_t yScanline = 0;
949     int32_t uvStride = 0;
950     int32_t uvScanline = 0;
951     int32_t uStride = 0;
952     int32_t uScanline = 0;
953     int32_t vStride = 0;
954     int32_t vScanline = 0;
955     int32_t yStrideToApp = 0;
956     int32_t uvStrideToApp = 0;
957     int32_t yScanlineToApp = 0;
958     int32_t uvScanlineToApp = 0;
959     int32_t srcOffset = 0;
960     int32_t dstOffset = 0;
961     int32_t srcBaseOffset = 0;
962     int32_t dstBaseOffset = 0;
963     int i;
964 
965     if ((NULL == stream) || (NULL == memory)) {
966         LOGE("Invalid preview callback input");
967         return BAD_VALUE;
968     }
969 
970     cam_stream_info_t *streamInfo =
971             reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
972     if (NULL == streamInfo) {
973         LOGE("Invalid streamInfo");
974         return BAD_VALUE;
975     }
976 
977     stream->getFrameDimension(preview_dim);
978     stream->getFormat(previewFmt);
979 
980     yStrideToApp = preview_dim.width;
981     yScanlineToApp = preview_dim.height;
982     uvStrideToApp = yStrideToApp;
983     uvScanlineToApp = yScanlineToApp / 2;
984 
985     /* The preview buffer size in the callback should be
986      * (width * height * bytes_per_pixel). Since all preview formats we support
987      * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
988      * We need to add a check if other formats are supported in the future. */
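    /* Illustrative example (not from the original comment): a 1280x720 NV21
     * preview works out to 1280 * 720 * 3 / 2 = 1382400 bytes. */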
989     if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
990         (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
991         (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
992         (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
993         (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
994         (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
995         if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
996             yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
997             yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
998             uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
999             uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1000             vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
1001             vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
1002 
1003             previewBufSize = (size_t)
1004                     (yStride * yScanline + uStride * uScanline + vStride * vScanline);
1005             previewBufSizeFromCallback = previewBufSize;
1006         } else {
1007             yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1008             yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1009             uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1010             uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1011 
1012             previewBufSize = (size_t)
1013                     ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
1014 
1015             previewBufSizeFromCallback = (size_t)
1016                     ((yStride * yScanline) + (uvStride * uvScanline));
1017         }
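        // If the padded (stride x scanline) size matches the tightly packed size,
        // the driver buffer can be wrapped and handed to the app as-is; otherwise
        // the frame is copied below into a tightly packed callback buffer.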
1018         if(previewBufSize == previewBufSizeFromCallback) {
1019             previewMem = mGetMemory(memory->getFd(idx),
1020                        previewBufSize, 1, mCallbackCookie);
1021             if (!previewMem || !previewMem->data) {
1022                 LOGE("mGetMemory failed.\n");
1023                 return NO_MEMORY;
1024             } else {
1025                 data = previewMem;
1026             }
1027         } else {
1028             data = memory->getMemory(idx, false);
1029             dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1030             if (!dataToApp || !dataToApp->data) {
1031                 LOGE("mGetMemory failed.\n");
1032                 return NO_MEMORY;
1033             }
1034 
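            // Copy the luma plane row by row, dropping the padding between the
            // stream stride and the width the application expects.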
1035             for (i = 0; i < preview_dim.height; i++) {
1036                 srcOffset = i * yStride;
1037                 dstOffset = i * yStrideToApp;
1038 
1039                 memcpy((unsigned char *) dataToApp->data + dstOffset,
1040                         (unsigned char *) data->data + srcOffset,
1041                         (size_t)yStrideToApp);
1042             }
1043 
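            // The chroma (interleaved UV) plane starts after the padded luma plane
            // in the source buffer and after the packed luma plane in the
            // destination buffer.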
1044             srcBaseOffset = yStride * yScanline;
1045             dstBaseOffset = yStrideToApp * yScanlineToApp;
1046 
1047             for (i = 0; i < preview_dim.height/2; i++) {
1048                 srcOffset = i * uvStride + srcBaseOffset;
1049                 dstOffset = i * uvStrideToApp + dstBaseOffset;
1050 
1051                 memcpy((unsigned char *) dataToApp->data + dstOffset,
1052                         (unsigned char *) data->data + srcOffset,
1053                         (size_t)yStrideToApp);
1054             }
1055         }
1056     } else {
1057         /* Invalid buffer content, but it can still be used as a first-preview-frame
1058            trigger in the framework/app */
1059         previewBufSize = (size_t)
1060                     ((yStrideToApp * yScanlineToApp) +
1061                     (uvStrideToApp * uvScanlineToApp));
1062         previewBufSizeFromCallback = 0;
1063         LOGW("Invalid preview format. Buffer content cannot be processed, size = %zu",
1064                 previewBufSize);
1065         dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1066         if (!dataToApp || !dataToApp->data) {
1067             LOGE("mGetMemory failed.\n");
1068             return NO_MEMORY;
1069         }
1070     }
1071     qcamera_callback_argm_t cbArg;
1072     memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1073     cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1074     cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1075     if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
1076             previewBufSize == previewBufSizeFromCallback) {
1077         cbArg.data = data;
1078     } else {
1079         cbArg.data = dataToApp;
1080     }
1081     if ( previewMem ) {
1082         cbArg.user_data = previewMem;
1083         cbArg.release_cb = releaseCameraMemory;
1084     } else if (dataToApp) {
1085         cbArg.user_data = dataToApp;
1086         cbArg.release_cb = releaseCameraMemory;
1087     }
1088     cbArg.cookie = this;
1089     rc = m_cbNotifier.notifyCallback(cbArg);
1090     if (rc != NO_ERROR) {
1091         LOGW("fail sending notification");
1092         if (previewMem) {
1093             previewMem->release(previewMem);
1094         } else if (dataToApp) {
1095             dataToApp->release(dataToApp);
1096         }
1097     }
1098 
1099     return rc;
1100 }
1101 
1102 /*===========================================================================
1103  * FUNCTION   : nodisplay_preview_stream_cb_routine
1104  *
1105  * DESCRIPTION: helper function to handle preview frame from preview stream in
1106  *              no-display case
1107  *
1108  * PARAMETERS :
1109  *   @super_frame : received super buffer
1110  *   @stream      : stream object
1111  *   @userdata    : user data ptr
1112  *
1113  * RETURN    : None
1114  *
1115  * NOTE      : the caller passes ownership of super_frame; it's our
1116  *             responsibility to free super_frame once we are done with it.
1117  *==========================================================================*/
1118 void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
1119                                                           mm_camera_super_buf_t *super_frame,
1120                                                           QCameraStream *stream,
1121                                                           void * userdata)
1122 {
1123     ATRACE_CALL();
1124     LOGH("[KPI Perf] E");
1125     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1126     if (pme == NULL ||
1127         pme->mCameraHandle == NULL ||
1128         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1129         LOGE("camera obj not valid");
1130         // simply free super frame
1131         free(super_frame);
1132         return;
1133     }
1134     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1135     if (NULL == frame) {
1136         LOGE("preview frame is NULL");
1137         free(super_frame);
1138         return;
1139     }
1140 
1141     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1142         LOGH("preview is not running, no need to process");
1143         stream->bufDone(frame->buf_idx);
1144         free(super_frame);
1145         return;
1146     }
1147 
1148     if (pme->needDebugFps()) {
1149         pme->debugShowPreviewFPS();
1150     }
1151 
1152     QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1153     camera_memory_t *preview_mem = NULL;
1154     if (previewMemObj != NULL) {
1155         preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1156     }
1157     if (NULL != previewMemObj && NULL != preview_mem) {
1158         pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
1159 
1160         if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
1161                 pme->needSendPreviewCallback() &&
1162                 (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
1163             qcamera_callback_argm_t cbArg;
1164             memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1165             cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1166             cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1167             cbArg.data = preview_mem;
1168             cbArg.user_data = (void *) &frame->buf_idx;
1169             cbArg.cookie = stream;
1170             cbArg.release_cb = returnStreamBuffer;
1171             int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1172             if (rc != NO_ERROR) {
1173                 LOGE ("fail sending data notify");
1174                 stream->bufDone(frame->buf_idx);
1175             }
1176         } else {
1177             stream->bufDone(frame->buf_idx);
1178         }
1179     }
1180     free(super_frame);
1181     LOGH("[KPI Perf] X");
1182 }
1183 
1184 /*===========================================================================
1185  * FUNCTION   : rdi_mode_stream_cb_routine
1186  *
1187  * DESCRIPTION: helper function to handle RDI frame from preview stream in
1188  *              rdi mode case
1189  *
1190  * PARAMETERS :
1191  *   @super_frame : received super buffer
1192  *   @stream      : stream object
1193  *   @userdata    : user data ptr
1194  *
1195  * RETURN    : None
1196  *
1197  * NOTE      : the caller passes ownership of super_frame; it's our
1198  *             responsibility to free super_frame once we are done with it.
1199  *==========================================================================*/
1200 void QCamera2HardwareInterface::rdi_mode_stream_cb_routine(
1201   mm_camera_super_buf_t *super_frame,
1202   QCameraStream *stream,
1203   void * userdata)
1204 {
1205     ATRACE_CALL();
1206     LOGH("RDI_DEBUG Enter");
1207     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1208     if (pme == NULL ||
1209         pme->mCameraHandle == NULL ||
1210         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1211         LOGE("camera obj not valid");
1212         free(super_frame);
1213         return;
1214     }
1215     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1216     if (NULL == frame) {
1217         LOGE("preview frame is NULL");
1218         goto end;
1219     }
1220     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1221         LOGH("preview is not running, no need to process");
1222         stream->bufDone(frame->buf_idx);
1223         goto end;
1224     }
1225     if (pme->needDebugFps()) {
1226         pme->debugShowPreviewFPS();
1227     }
1228     // Non-secure Mode
1229     if (!pme->isSecureMode()) {
1230         QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1231         if (NULL == previewMemObj) {
1232             LOGE("previewMemObj is NULL");
1233             stream->bufDone(frame->buf_idx);
1234             goto end;
1235         }
1236 
1237         camera_memory_t *preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1238         if (NULL != preview_mem) {
1239             previewMemObj->cleanCache(frame->buf_idx);
1240             // Dump RAW frame
1241             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
1242             // Notify Preview callback frame
1243             if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1244                     pme->mDataCb != NULL &&
1245                     pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1246                 qcamera_callback_argm_t cbArg;
1247                 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1248                 cbArg.cb_type    = QCAMERA_DATA_CALLBACK;
1249                 cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
1250                 cbArg.data       = preview_mem;
1251                 cbArg.user_data = (void *) &frame->buf_idx;
1252                 cbArg.cookie     = stream;
1253                 cbArg.release_cb = returnStreamBuffer;
1254                 pme->m_cbNotifier.notifyCallback(cbArg);
1255             } else {
1256                 LOGH("No need to process preview frame, return buffer");
1257                 stream->bufDone(frame->buf_idx);
1258             }
1259         }
1260         else {
1261             LOGE("preview_mem is NULL");
1262             stream->bufDone(frame->buf_idx);
1263         }
1264     } else {
1265         // Secure Mode
1266         // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode
1267         QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1268         if (NULL == previewMemObj) {
1269             LOGE("previewMemObj is NULL");
1270             stream->bufDone(frame->buf_idx);
1271             goto end;
1272         }
1273 
1274         int fd = previewMemObj->getFd(frame->buf_idx);
1275         LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx);
1276         if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1277                 pme->mDataCb != NULL &&
1278                 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1279             // Prepare Callback structure
1280             qcamera_callback_argm_t cbArg;
1281             memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1282             cbArg.cb_type    = QCAMERA_NOTIFY_CALLBACK;
1283             cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
1284 #ifndef VANILLA_HAL
1285             cbArg.ext1       = CAMERA_FRAME_DATA_FD;
1286             cbArg.ext2       = fd;
1287 #endif
1288             cbArg.user_data  = (void *) &frame->buf_idx;
1289             cbArg.cookie     = stream;
1290             cbArg.release_cb = returnStreamBuffer;
1291             pme->m_cbNotifier.notifyCallback(cbArg);
1292         } else {
1293             LOGH("No need to process preview frame, return buffer");
1294             stream->bufDone(frame->buf_idx);
1295         }
1296     }
1297 end:
1298     free(super_frame);
1299     LOGH("RDI_DEBUG Exit");
1300     return;
1301 }
1302 
1303 /*===========================================================================
1304  * FUNCTION   : postview_stream_cb_routine
1305  *
1306  * DESCRIPTION: helper function to handle postview frame from postview stream
1307  *
1308  * PARAMETERS :
1309  *   @super_frame : received super buffer
1310  *   @stream      : stream object
1311  *   @userdata    : user data ptr
1312  *
1313  * RETURN    : None
1314  *
1315  * NOTE      : the caller passes ownership of super_frame; it's our
1316  *             responsibility to free super_frame once we are done with it.
1317  *==========================================================================*/
1318 void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1319                                                            QCameraStream *stream,
1320                                                            void *userdata)
1321 {
1322     ATRACE_CALL();
1323     int err = NO_ERROR;
1324     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1325     QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
1326 
1327     if (pme == NULL) {
1328         LOGE("Invalid hardware object");
1329         free(super_frame);
1330         return;
1331     }
1332     if (memory == NULL) {
1333         LOGE("Invalid memory object");
1334         free(super_frame);
1335         return;
1336     }
1337 
1338     LOGH("[KPI Perf] : BEGIN");
1339 
1340     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1341     if (NULL == frame) {
1342         LOGE("preview frame is NULL");
1343         free(super_frame);
1344         return;
1345     }
1346 
1347     QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
1348     if (NULL != memObj) {
1349         pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
1350     }
1351 
1352     // Return buffer back to driver
1353     err = stream->bufDone(frame->buf_idx);
1354     if ( err < 0) {
1355         LOGE("stream bufDone failed %d", err);
1356     }
1357 
1358     free(super_frame);
1359     LOGH("[KPI Perf] : END");
1360     return;
1361 }
1362 
1363 /*===========================================================================
1364  * FUNCTION   : video_stream_cb_routine
1365  *
1366  * DESCRIPTION: helper function to handle video frame from video stream
1367  *
1368  * PARAMETERS :
1369  *   @super_frame : received super buffer
1370  *   @stream      : stream object
1371  *   @userdata    : user data ptr
1372  *
1373  * RETURN    : None
1374  *
1375  * NOTE      : caller passes the ownership of super_frame, it's our
1376  *             responsibility to free super_frame once it's done. The video
1377  *             frame will be sent to the video encoder. Once the encoder is
1378  *             done with the video frame, it will call another API
1379  *             (release_recording_frame) to return the frame back.
1380  *==========================================================================*/
1381 void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1382                                                         QCameraStream *stream,
1383                                                         void *userdata)
1384 {
1385     ATRACE_CALL();
1386     QCameraVideoMemory *videoMemObj = NULL;
1387     camera_memory_t *video_mem = NULL;
1388     nsecs_t timeStamp = 0;
1389     bool triggerTCB = FALSE;
1390 
1391     LOGH("[KPI Perf] : BEGIN");
1392     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1393     if (pme == NULL ||
1394         pme->mCameraHandle == NULL ||
1395         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1396         LOGE("camera obj not valid");
1397         // simply free super frame
1398         free(super_frame);
1399         return;
1400     }
1401 
1402     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1403 
1404     if (pme->needDebugFps()) {
1405         pme->debugShowVideoFPS();
1406     }
1407     if (pme->m_bRecordStarted) {
1408         LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
1409         pme->m_bRecordStarted = false;
1410     }
1411     LOGD("Stream(%d), Timestamp: %ld %ld",
1412           frame->stream_id,
1413           frame->ts.tv_sec,
1414           frame->ts.tv_nsec);
1415 
1416     if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
1417         if (pme->mParameters.getVideoBatchSize() == 0) {
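            // Non-batch mode: each video frame goes to the encoder individually.
            // The timestamp is the capture time in nanoseconds (tv_sec * 1e9 + tv_nsec).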
1418             timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1419                     + frame->ts.tv_nsec;
1420             LOGD("Video frame to encoder TimeStamp : %lld batch = 0",
1421                     timeStamp);
1422             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1423             videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1424             video_mem = NULL;
1425             if (NULL != videoMemObj) {
1426                 video_mem = videoMemObj->getMemory(frame->buf_idx,
1427                         (pme->mStoreMetaDataInFrame > 0)? true : false);
1428                 videoMemObj->updateNativeHandle(frame->buf_idx);
1429                 triggerTCB = TRUE;
1430             }
1431         } else {
1432             //Handle video batch callback
1433             native_handle_t *nh = NULL;
1434             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1435             QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1436             if ((stream->mCurMetaMemory == NULL)
1437                     || (stream->mCurBufIndex == -1)) {
1438                 //get Free metadata available
1439                 for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
1440                     if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
1441                         stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
1442                         stream->mCurBufIndex = 0;
1443                         stream->mCurMetaIndex = i;
1444                         stream->mStreamMetaMemory[i].numBuffers = 0;
1445                         break;
1446                     }
1447                 }
1448             }
1449             video_mem = stream->mCurMetaMemory;
1450             nh = videoMemObj->updateNativeHandle(stream->mCurMetaIndex);
1451             if (video_mem == NULL || nh == NULL) {
1452                 LOGE("No Free metadata. Drop this frame");
1453                 stream->mCurBufIndex = -1;
1454                 stream->bufDone(frame->buf_idx);
1455                 free(super_frame);
1456                 return;
1457             }
1458 
1459             int index = stream->mCurBufIndex;
1460             int fd_cnt = pme->mParameters.getVideoBatchSize();
1461             nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1462                     + frame->ts.tv_nsec;
1463             if (index == 0) {
1464                 stream->mFirstTimeStamp = frame_ts;
1465             }
1466 
1467             stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
1468                     = (uint8_t)frame->buf_idx;
1469             stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
1470             stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
1471                     = TRUE;
1472             /*
1473             * data[0] => FD
1474             * data[mNumFDs + 1] => OFFSET
1475             * data[mNumFDs + 2] => SIZE
1476             * data[mNumFDs + 3] => Usage Flag (Color format/Compression)
1477             * data[mNumFDs + 4] => TIMESTAMP
1478             * data[mNumFDs + 5] => FORMAT
1479             */
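            // For example, with a batch size of 4 (fd_cnt == 4), the entry filled at
            // index 1 lands in data[1], data[5], data[9], data[13], data[17] and
            // data[21]: all FDs are packed first, then all offsets, sizes, usage
            // flags, timestamp deltas and formats.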
1480             nh->data[index] = videoMemObj->getFd(frame->buf_idx);
1481             nh->data[index + fd_cnt] = 0;
1482             nh->data[index + (fd_cnt * 2)] = (int)videoMemObj->getSize(frame->buf_idx);
1483             nh->data[index + (fd_cnt * 3)] = videoMemObj->getUsage();
1484             nh->data[index + (fd_cnt * 4)] = (int)(frame_ts - stream->mFirstTimeStamp);
1485             nh->data[index + (fd_cnt * 5)] = videoMemObj->getFormat();
1486             stream->mCurBufIndex++;
1487             if (stream->mCurBufIndex == fd_cnt) {
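                // The batch is full: publish the metadata buffer holding all fd_cnt
                // frames to the encoder and reset the batching state so the next
                // frame starts a new batch.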
1488                 timeStamp = stream->mFirstTimeStamp;
1489                 LOGD("Video frame to encoder TimeStamp : %lld batch = %d",
1490                     timeStamp, fd_cnt);
1491                 stream->mCurBufIndex = -1;
1492                 stream->mCurMetaIndex = -1;
1493                 stream->mCurMetaMemory = NULL;
1494                 triggerTCB = TRUE;
1495             }
1496         }
1497     } else {
1498         videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1499         video_mem = NULL;
1500         native_handle_t *nh = NULL;
1501         int fd_cnt = frame->user_buf.bufs_used;
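        // Interface-level batching: this frame already aggregates several plane
        // buffers; fd_cnt (user_buf.bufs_used) is how many were packed into it.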
1502         if (NULL != videoMemObj) {
1503             video_mem = videoMemObj->getMemory(frame->buf_idx, true);
1504             nh = videoMemObj->updateNativeHandle(frame->buf_idx);
1505         } else {
1506             LOGE("videoMemObj NULL");
1507         }
1508 
1509         if (nh != NULL) {
1510             timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1511                     + frame->ts.tv_nsec;
1512             LOGD("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
1513                     timeStamp, frame->fd, frame->buf_idx, fd_cnt);
1514 
1515             for (int i = 0; i < fd_cnt; i++) {
1516                 if (frame->user_buf.buf_idx[i] >= 0) {
1517                     mm_camera_buf_def_t *plane_frame =
1518                             &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
1519                     QCameraVideoMemory *frameobj =
1520                             (QCameraVideoMemory *)plane_frame->mem_info;
1521                     int usage = frameobj->getUsage();
1522                     nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
1523                             + plane_frame->ts.tv_nsec;
1524                     /*
1525                        data[0] => FD
1526                        data[mNumFDs + 1] => OFFSET
1527                        data[mNumFDs + 2] => SIZE
1528                        data[mNumFDs + 3] => Usage Flag (Color format/Compression)
1529                        data[mNumFDs + 4] => TIMESTAMP
1530                        data[mNumFDs + 5] => FORMAT
1531                     */
1532                     nh->data[i] = frameobj->getFd(plane_frame->buf_idx);
1533                     nh->data[fd_cnt + i] = 0;
1534                     nh->data[(2 * fd_cnt) + i] = (int)frameobj->getSize(plane_frame->buf_idx);
1535                     nh->data[(3 * fd_cnt) + i] = usage;
1536                     nh->data[(4 * fd_cnt) + i] = (int)(frame_ts - timeStamp);
1537                     nh->data[(5 * fd_cnt) + i] = frameobj->getFormat();
1538                     LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
1539                             (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
1540                     pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
1541                 }
1542             }
1543             triggerTCB = TRUE;
1544         } else {
1545             LOGE("No Video Meta Available. Return Buffer");
1546             stream->bufDone(super_frame->bufs[0]->buf_idx);
1547         }
1548     }
1549 
1550     if ((NULL != video_mem) && (triggerTCB == TRUE)) {
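        // Hand the frame (or completed batch) to the app through the timestamped
        // data callback; the encoder returns it later via release_recording_frame().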
1551         if ((pme->mDataCbTimestamp != NULL) &&
1552             pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
1553             qcamera_callback_argm_t cbArg;
1554             memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1555             cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
1556             cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
1557             cbArg.data = video_mem;
1558             cbArg.timestamp = timeStamp;
1559             int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1560             if (rc != NO_ERROR) {
1561                 LOGE("fail sending data notify");
1562                 stream->bufDone(frame->buf_idx);
1563             }
1564         }
1565     }
1566 
1567     free(super_frame);
1568     LOGH("[KPI Perf] : END");
1569 }
1570 
1571 /*===========================================================================
1572  * FUNCTION   : snapshot_channel_cb_routine
1573  *
1574  * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
1575  *
1576  * PARAMETERS :
1577  *   @super_frame : received super buffer
1578  *   @userdata    : user data ptr
1579  *
1580  * RETURN    : None
1581  *
1582  * NOTE      : super_frame will be released after this call by the caller, so if
1583  *             an async operation is needed on super_frame, it's our responsibility
1584  *             to save a copy of it to be used later.
1585  *==========================================================================*/
1586 void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1587        void *userdata)
1588 {
1589     ATRACE_CALL();
1590     char value[PROPERTY_VALUE_MAX];
1591     QCameraChannel *pChannel = NULL;
1592 
1593     LOGH("[KPI Perf]: E");
1594     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1595     if (pme == NULL ||
1596         pme->mCameraHandle == NULL ||
1597         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1598         LOGE("camera obj not valid");
1599         // simply free super frame
1600         free(super_frame);
1601         return;
1602     }
1603 
1604     if (pme->isLowPowerMode()) {
1605         pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
1606     } else {
1607         pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
1608     }
1609 
1610     if ((pChannel == NULL) || (pChannel->getMyHandle() != super_frame->ch_id)) {
1611         LOGE("Snapshot channel doesn't exist, return here");
1612         return;
1613     }
1614 
1615     property_get("persist.camera.dumpmetadata", value, "0");
1616     int32_t enabled = atoi(value);
1617     if (enabled) {
1618         if (pChannel == NULL ||
1619             pChannel->getMyHandle() != super_frame->ch_id) {
1620             LOGE("Capture channel doesn't exist, return here");
1621             return;
1622         }
1623         mm_camera_buf_def_t *pMetaFrame = NULL;
1624         QCameraStream *pStream = NULL;
1625         for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1626             pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1627             if (pStream != NULL) {
1628                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1629                     pMetaFrame = super_frame->bufs[i]; //find the metadata
1630                     if (pMetaFrame != NULL &&
1631                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1632                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
1633                     }
1634                     break;
1635                 }
1636             }
1637         }
1638     }
1639 
1640     // save a copy for the superbuf
1641     mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1642     if (frame == NULL) {
1643         LOGE("Error allocating memory to save received_frame structure.");
1644         pChannel->bufDone(super_frame);
1645         return;
1646     }
1647     *frame = *super_frame;
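    // A shallow copy of the superbuf descriptor is enough here: the caller releases
    // super_frame after this callback returns, while the underlying buffers remain
    // owned by the interface until bufDone() is called for them.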
1648 
1649     if (frame->num_bufs > 0) {
1650         LOGI("[KPI Perf]: superbuf frame_idx %d",
1651                 frame->bufs[0]->frame_idx);
1652     }
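    // Make sure deferred postproc initialization has finished, then hand the copied
    // superbuf to the postprocessor for reprocess/JPEG encoding.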
1653 
1654     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1655             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1656         LOGE("Failed to trigger process data");
1657         pChannel->bufDone(super_frame);
1658         free(frame);
1659         frame = NULL;
1660         return;
1661     }
1662 
1663     LOGH("[KPI Perf]: X");
1664 }
1665 
1666 /*===========================================================================
1667  * FUNCTION   : raw_stream_cb_routine
1668  *
1669  * DESCRIPTION: helper function to handle raw dump frame from raw stream
1670  *
1671  * PARAMETERS :
1672  *   @super_frame : received super buffer
1673  *   @stream      : stream object
1674  *   @userdata    : user data ptr
1675  *
1676  * RETURN    : None
1677  *
1678  * NOTE      : caller passes the ownership of super_frame, it's our
1679  *             responsibility to free super_frame once it's done. For raw
1680  *             frame, there is no need to send to postprocessor for jpeg
1681  *             encoding. This function will play shutter and send the data
1682  *             callback to upper layer. Raw frame buffer will be returned
1683  *             back to kernel, and frame will be free after use.
1684  *==========================================================================*/
1685 void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1686                                                       QCameraStream * /*stream*/,
1687                                                       void * userdata)
1688 {
1689     ATRACE_CALL();
1690     LOGH("[KPI Perf] : BEGIN");
1691     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1692     if (pme == NULL ||
1693         pme->mCameraHandle == NULL ||
1694         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1695         LOGE("camera obj not valid");
1696         // simply free super frame
1697         free(super_frame);
1698         return;
1699     }
1700 
1701     pme->m_postprocessor.processRawData(super_frame);
1702     LOGH("[KPI Perf] : END");
1703 }
1704 
1705 /*===========================================================================
1706  * FUNCTION   : raw_channel_cb_routine
1707  *
1708  * DESCRIPTION: helper function to handle RAW  superbuf callback directly from
1709  *              mm-camera-interface
1710  *
1711  * PARAMETERS :
1712  *   @super_frame : received super buffer
1713  *   @userdata    : user data ptr
1714  *
1715  * RETURN    : None
1716  *
1717  * NOTE      : super_frame will be released after this call by the caller, so if
1718  *             an async operation is needed on super_frame, it's our responsibility
1719  *             to save a copy of it to be used later.
1720 *==========================================================================*/
1721 void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1722         void *userdata)
1723 
1724 {
1725     ATRACE_CALL();
1726     char value[PROPERTY_VALUE_MAX];
1727 
1728     LOGH("[KPI Perf]: E");
1729     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1730     if (pme == NULL ||
1731         pme->mCameraHandle == NULL ||
1732         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1733         LOGE("camera obj not valid");
1734         // simply free super frame
1735         free(super_frame);
1736         return;
1737     }
1738 
1739     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
1740     if (pChannel == NULL) {
1741         LOGE("RAW channel doesn't exist, return here");
1742         return;
1743     }
1744 
1745     if (pChannel->getMyHandle() != super_frame->ch_id) {
1746         LOGE("Invalid Input super buffer");
1747         pChannel->bufDone(super_frame);
1748         return;
1749     }
1750 
1751     property_get("persist.camera.dumpmetadata", value, "0");
1752     int32_t enabled = atoi(value);
1753     if (enabled) {
1754         mm_camera_buf_def_t *pMetaFrame = NULL;
1755         QCameraStream *pStream = NULL;
1756         for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1757             pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1758             if (pStream != NULL) {
1759                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1760                     pMetaFrame = super_frame->bufs[i]; //find the metadata
1761                     if (pMetaFrame != NULL &&
1762                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1763                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
1764                     }
1765                     break;
1766                 }
1767             }
1768         }
1769     }
1770 
1771     // save a copy for the superbuf
1772     mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1773     if (frame == NULL) {
1774         LOGE("Error allocating memory to save received_frame structure.");
1775         pChannel->bufDone(super_frame);
1776         return;
1777     }
1778     *frame = *super_frame;
1779 
1780     if (frame->num_bufs > 0) {
1781         LOGI("[KPI Perf]: superbuf frame_idx %d",
1782                 frame->bufs[0]->frame_idx);
1783     }
1784 
1785     // Wait on Postproc initialization if needed
1786     // then send to postprocessor
1787     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1788             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1789         LOGE("Failed to trigger process data");
1790         pChannel->bufDone(super_frame);
1791         free(frame);
1792         frame = NULL;
1793         return;
1794     }
1795 
1796     LOGH("[KPI Perf]: X");
1797 
1798 }
1799 
1800 /*===========================================================================
1801  * FUNCTION   : preview_raw_stream_cb_routine
1802  *
1803  * DESCRIPTION: helper function to handle raw frame during standard preview
1804  *
1805  * PARAMETERS :
1806  *   @super_frame : received super buffer
1807  *   @stream      : stream object
1808  *   @userdata    : user data ptr
1809  *
1810  * RETURN    : None
1811  *
1812  * NOTE      : caller passes the ownership of super_frame, it's our
1813  *             responsibility to free super_frame once it's done.
1814  *==========================================================================*/
1815 void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1816                                                               QCameraStream * stream,
1817                                                               void * userdata)
1818 {
1819     ATRACE_CALL();
1820     LOGH("[KPI Perf] : BEGIN");
1821     char value[PROPERTY_VALUE_MAX];
1822     bool dump_preview_raw = false, dump_video_raw = false;
1823 
1824     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1825     if (pme == NULL ||
1826         pme->mCameraHandle == NULL ||
1827         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1828         LOGE("camera obj not valid");
1829         // simply free super frame
1830         free(super_frame);
1831         return;
1832     }
1833 
1834     mm_camera_buf_def_t *raw_frame = super_frame->bufs[0];
1835 
1836     if (raw_frame != NULL) {
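        // Dump the raw frame when persist.camera.preview_raw (or, while recording,
        // persist.camera.video_raw) is set, then return the buffer right away.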
1837         property_get("persist.camera.preview_raw", value, "0");
1838         dump_preview_raw = atoi(value) > 0 ? true : false;
1839         property_get("persist.camera.video_raw", value, "0");
1840         dump_video_raw = atoi(value) > 0 ? true : false;
1841         if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
1842                 && dump_video_raw)) {
1843             pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1844         }
1845         stream->bufDone(raw_frame->buf_idx);
1846     }
1847     free(super_frame);
1848 
1849     LOGH("[KPI Perf] : END");
1850 }
1851 
1852 /*===========================================================================
1853  * FUNCTION   : snapshot_raw_stream_cb_routine
1854  *
1855  * DESCRIPTION: helper function to handle raw frame during standard capture
1856  *
1857  * PARAMETERS :
1858  *   @super_frame : received super buffer
1859  *   @stream      : stream object
1860  *   @userdata    : user data ptr
1861  *
1862  * RETURN    : None
1863  *
1864  * NOTE      : caller passes the ownership of super_frame, it's our
1865  *             responsibility to free super_frame once it's done.
1866  *==========================================================================*/
1867 void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1868                                                                QCameraStream * stream,
1869                                                                void * userdata)
1870 {
1871     ATRACE_CALL();
1872     LOGH("[KPI Perf] : BEGIN");
1873     char value[PROPERTY_VALUE_MAX];
1874     bool dump_raw = false;
1875 
1876     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1877     if (pme == NULL ||
1878         pme->mCameraHandle == NULL ||
1879         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1880         LOGE("camera obj not valid");
1881         // simply free super frame
1882         free(super_frame);
1883         return;
1884     }
1885 
1886     property_get("persist.camera.snapshot_raw", value, "0");
1887     dump_raw = atoi(value) > 0 ? true : false;
1888 
1889     for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1890         if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
1891             mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
1892             if (NULL != stream) {
1893                 if (dump_raw) {
1894                     pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1895                 }
1896                 stream->bufDone(super_frame->bufs[i]->buf_idx);
1897             }
1898             break;
1899         }
1900     }
1901 
1902     free(super_frame);
1903 
1904     LOGH("[KPI Perf] : END");
1905 }
1906 
1907 /*===========================================================================
1908  * FUNCTION   : updateMetadata
1909  *
1910  * DESCRIPTION: Frame related parameter can be updated here
1911  *
1912  * PARAMETERS :
1913  *   @pMetaData : pointer to metadata buffer
1914  *
1915  * RETURN     : int32_t type of status
1916  *              NO_ERROR  -- success
1917  *              non-zero failure code
1918  *==========================================================================*/
1919 int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
1920 {
1921     int32_t rc = NO_ERROR;
1922 
1923     if (pMetaData == NULL) {
1924         LOGE("Null Metadata buffer");
1925         return rc;
1926     }
1927 
1928     // Sharpness
1929     cam_edge_application_t edge_application;
1930     memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
1931     edge_application.sharpness = mParameters.getSharpness();
1932     if (edge_application.sharpness != 0) {
1933         edge_application.edge_mode = CAM_EDGE_MODE_FAST;
1934     } else {
1935         edge_application.edge_mode = CAM_EDGE_MODE_OFF;
1936     }
1937     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1938             CAM_INTF_META_EDGE_MODE, edge_application);
1939 
1940     //Effect
1941     int32_t prmEffect = mParameters.getEffect();
1942     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);
1943 
1944     //flip
1945     int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
1946     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);
1947 
1948     //denoise
1949     uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
1950     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1951             CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);
1952 
1953     //rotation & device rotation
1954     uint32_t prmRotation = mParameters.getJpegRotation();
1955     cam_rotation_info_t rotation_info;
1956     memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
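    // Map the JPEG rotation and the device rotation (both in degrees) onto the
    // backend rotation enums before batching them into the metadata.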
1957     if (prmRotation == 0) {
1958        rotation_info.rotation = ROTATE_0;
1959     } else if (prmRotation == 90) {
1960        rotation_info.rotation = ROTATE_90;
1961     } else if (prmRotation == 180) {
1962        rotation_info.rotation = ROTATE_180;
1963     } else if (prmRotation == 270) {
1964        rotation_info.rotation = ROTATE_270;
1965     }
1966 
1967     uint32_t device_rotation = mParameters.getDeviceRotation();
1968     if (device_rotation == 0) {
1969         rotation_info.device_rotation = ROTATE_0;
1970     } else if (device_rotation == 90) {
1971         rotation_info.device_rotation = ROTATE_90;
1972     } else if (device_rotation == 180) {
1973         rotation_info.device_rotation = ROTATE_180;
1974     } else if (device_rotation == 270) {
1975         rotation_info.device_rotation = ROTATE_270;
1976     } else {
1977         rotation_info.device_rotation = ROTATE_0;
1978     }
1979 
1980     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);
1981 
1982     // Imglib Dynamic Scene Data
1983     cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
1984     if (mParameters.isStillMoreEnabled()) {
1985         cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
1986         dyn_img_data.input_count = stillmore_cap.burst_count;
1987     }
1988     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1989             CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);
1990 
1991     //CPP CDS
1992     int32_t prmCDSMode = mParameters.getCDSMode();
1993     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1994             CAM_INTF_PARM_CDS_MODE, prmCDSMode);
1995 
1996     return rc;
1997 }
1998 
1999 /*===========================================================================
2000  * FUNCTION   : metadata_stream_cb_routine
2001  *
2002  * DESCRIPTION: helper function to handle metadata frame from metadata stream
2003  *
2004  * PARAMETERS :
2005  *   @super_frame : received super buffer
2006  *   @stream      : stream object
2007  *   @userdata    : user data ptr
2008  *
2009  * RETURN    : None
2010  *
2011  * NOTE      : caller passes the ownership of super_frame, it's our
2012  *             responsibility to free super_frame once it's done. Metadata
2013  *             could have valid entries for face detection result or
2014  *             histogram statistics information.
2015  *==========================================================================*/
2016 void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2017                                                            QCameraStream * stream,
2018                                                            void * userdata)
2019 {
2020     ATRACE_CALL();
2021     LOGD("[KPI Perf] : BEGIN");
2022     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2023     if (pme == NULL ||
2024         pme->mCameraHandle == NULL ||
2025         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
2026         LOGE("camera obj not valid");
2027         // simply free super frame
2028         free(super_frame);
2029         return;
2030     }
2031 
2032     mm_camera_buf_def_t *frame = super_frame->bufs[0];
2033     metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
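    // The metadata buffer can carry many independent entries (3A results, AF state,
    // face detection, histogram, crop info, EXIF debug data, ...). Each block below
    // forwards one category to the state machine or caches it for later EXIF use.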
2034     if (pme->m_stateMachine.isNonZSLCaptureRunning() &&
2035         !pme->mLongshotEnabled) {
2036         // Make the shutter callback in non-ZSL mode once the raw frame is received from the VFE.
2037         pme->playShutter();
2038     }
2039 
2040     if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
2041         //Dump Tuning data for video
2042         pme->dumpMetadataToFile(stream,frame,(char *)"Video");
2043     }
2044 
2045     IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
2046         // process histogram statistics info
2047         qcamera_sm_internal_evt_payload_t *payload =
2048             (qcamera_sm_internal_evt_payload_t *)
2049                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2050         if (NULL != payload) {
2051             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2052             payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
2053             payload->stats_data = *stats_data;
2054             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2055             if (rc != NO_ERROR) {
2056                 LOGW("processEvt histogram failed");
2057                 free(payload);
2058                 payload = NULL;
2059 
2060             }
2061         } else {
2062             LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
2063         }
2064     }
2065 
2066     IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
2067             CAM_INTF_META_FACE_DETECTION, pMetaData) {
2068 
2069         cam_faces_data_t faces_data;
2070         pme->fillFacesData(faces_data, pMetaData);
2071         faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
2072 
2073         qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2074             malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2075         if (NULL != payload) {
2076             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2077             payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
2078             payload->faces_data = faces_data;
2079             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2080             if (rc != NO_ERROR) {
2081                 LOGW("processEvt face detection failed");
2082                 free(payload);
2083                 payload = NULL;
2084             }
2085         } else {
2086             LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
2087         }
2088     }
2089 
2090     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
2091         uint8_t forceAFUpdate = FALSE;
2092         //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
2093         //upper layers. But in scenarios where metadata drops especially which contain important
2094         //AF information, APP will wait indefinitely for focus result resulting in capture hang.
2095         //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
2096         //This will help overcome metadata drop issue with the earlier approach.
2097         //3. But sometimes AF state transitions can happen so fast within same metadata due to
2098         //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
2099         //state depending on the state transitions happened (for example state A -> B -> A).
2100         //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
2101         //we check state transition at both HAL level and AF module level. We rely on
2102         //'state transition' meta field set by AF module for the state transition detected by it.
2103         IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
2104             forceAFUpdate = *stateChange;
2105         }
2106         //This is a special scenario in which when scene modes like landscape are selected, AF mode
2107         //gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
2108         //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
2109         //change here and trigger AF callback @ processAutoFocusEvent().
2110         IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2111             if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
2112                     pme->mActiveAF){
2113                 forceAFUpdate = TRUE;
2114             }
2115         }
2116         if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
2117             cam_af_state_t prevFocusState = pme->m_currentFocusState;
2118             pme->m_currentFocusState = (cam_af_state_t)(*afState);
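            // Forward the (possibly forced) AF state change to the state machine,
            // which decides whether to issue the focus callback and whether the ZSL
            // queue needs to be flushed.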
2119             qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2120                     malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2121             if (NULL != payload) {
2122                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2123                 payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
2124                 payload->focus_data.focus_state = (cam_af_state_t)(*afState);
2125                 //Need to flush ZSL Q only if we are transitioning from scanning state
2126                 //to focused/not focused state.
2127                 payload->focus_data.flush_info.needFlush =
2128                         ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
2129                         (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
2130                         ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
2131                         (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
2132                 payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
2133 
2134                 IF_META_AVAILABLE(float, focusDistance,
2135                         CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
2136                     payload->focus_data.focus_dist.
2137                     focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
2138                 }
2139                 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
2140                     payload->focus_data.focus_dist.
2141                             focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
2142                     payload->focus_data.focus_dist.
2143                             focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
2144                 }
2145                 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2146                     payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
2147                 }
2148                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2149                 if (rc != NO_ERROR) {
2150                     LOGW("processEvt focus failed");
2151                     free(payload);
2152                     payload = NULL;
2153                 }
2154             } else {
2155                 LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
2156             }
2157         }
2158     }
2159 
2160     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
2161         if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
2162             LOGE("Invalid num_of_streams %d in crop_data",
2163                 crop_data->num_of_streams);
2164         } else {
2165             qcamera_sm_internal_evt_payload_t *payload =
2166                 (qcamera_sm_internal_evt_payload_t *)
2167                     malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2168             if (NULL != payload) {
2169                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2170                 payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
2171                 payload->crop_data = *crop_data;
2172                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2173                 if (rc != NO_ERROR) {
2174                     LOGE("processEvt crop info failed");
2175                     free(payload);
2176                     payload = NULL;
2177                 }
2178             } else {
2179                 LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
2180             }
2181         }
2182     }
2183 
2184     IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
2185             CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
2186         qcamera_sm_internal_evt_payload_t *payload =
2187         (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2188         if (NULL != payload) {
2189             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2190             payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
2191             payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
2192             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2193             if (rc != NO_ERROR) {
2194                 LOGW("processEvt prep_snapshot failed");
2195                 free(payload);
2196                 payload = NULL;
2197             }
2198         } else {
2199             LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
2200         }
2201     }
2202 
2203     IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
2204             CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
2205         LOGH("hdr_scene_data: %d %f\n",
2206                 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
2207         //Handle this HDR meta data only if capture is not in process
2208         if (!pme->m_stateMachine.isCaptureRunning()) {
2209             qcamera_sm_internal_evt_payload_t *payload =
2210                     (qcamera_sm_internal_evt_payload_t *)
2211                     malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2212             if (NULL != payload) {
2213                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2214                 payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
2215                 payload->hdr_data = *hdr_scene_data;
2216                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2217                 if (rc != NO_ERROR) {
2218                     LOGW("processEvt hdr update failed");
2219                     free(payload);
2220                     payload = NULL;
2221                 }
2222             } else {
2223                 LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
2224             }
2225         }
2226     }
2227 
2228     IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
2229             CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
2230         qcamera_sm_internal_evt_payload_t *payload =
2231             (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2232         if (NULL != payload) {
2233             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2234             payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
2235             payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
2236             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2237             if (rc != NO_ERROR) {
2238                 LOGW("processEvt asd_update failed");
2239                 free(payload);
2240                 payload = NULL;
2241             }
2242         } else {
2243             LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
2244         }
2245     }
2246 
2247     IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
2248         LOGH(", metadata for awb params.");
2249         qcamera_sm_internal_evt_payload_t *payload =
2250                 (qcamera_sm_internal_evt_payload_t *)
2251                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2252         if (NULL != payload) {
2253             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2254             payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
2255             payload->awb_data = *awb_params;
2256             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2257             if (rc != NO_ERROR) {
2258                 LOGW("processEvt awb_update failed");
2259                 free(payload);
2260                 payload = NULL;
2261             }
2262         } else {
2263             LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
2264         }
2265     }
2266 
2267     IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
2268         pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
2269     }
2270 
2271     IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
2272         pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
2273     }
2274 
2275     IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
2276         pme->mExifParams.sensor_params.aperture_value = *aperture_value;
2277     }
2278 
2279     IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
2280         pme->mExifParams.cam_3a_params = *ae_params;
2281         pme->mExifParams.cam_3a_params_valid = TRUE;
2282         pme->mFlashNeeded = ae_params->flash_needed;
2283         pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
2284         qcamera_sm_internal_evt_payload_t *payload =
2285                 (qcamera_sm_internal_evt_payload_t *)
2286                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2287         if (NULL != payload) {
2288             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2289             payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
2290             payload->ae_data = *ae_params;
2291             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2292             if (rc != NO_ERROR) {
2293                 LOGW("processEvt ae_update failed");
2294                 free(payload);
2295                 payload = NULL;
2296             }
2297         } else {
2298             LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
2299         }
2300     }
2301 
2302     IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
2303         pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
2304     }
2305 
2306     IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
2307         pme->mExifParams.sensor_params = *sensor_params;
2308     }
2309 
2310     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
2311             CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
2312         if (pme->mExifParams.debug_params) {
2313             pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
2314             pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
2315         }
2316     }
2317 
2318     IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
2319             CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
2320         if (pme->mExifParams.debug_params) {
2321             pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
2322             pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
2323         }
2324     }
2325 
2326     IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
2327             CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
2328         if (pme->mExifParams.debug_params) {
2329             pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
2330             pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
2331         }
2332     }
2333 
2334     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
2335             CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
2336         if (pme->mExifParams.debug_params) {
2337             pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
2338             pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
2339         }
2340     }
2341 
2342     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
2343             CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
2344         if (pme->mExifParams.debug_params) {
2345             pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
2346             pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
2347         }
2348     }
2349 
2350     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
2351             CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
2352         if (pme->mExifParams.debug_params) {
2353             pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
2354             pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
2355         }
2356     }
2357 
2358     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
2359             CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
2360         if (pme->mExifParams.debug_params) {
2361             pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
2362             pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
2363         }
2364     }
2365 
2366     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
2367             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
2368         if (pme->mExifParams.debug_params) {
2369             pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
2370             pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
2371         }
2372     }
2373 
2374     IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
2375         qcamera_sm_internal_evt_payload_t *payload =
2376                 (qcamera_sm_internal_evt_payload_t *)
2377                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2378         if (NULL != payload) {
2379             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2380             payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
2381             payload->led_data = (cam_flash_mode_t)*led_mode;
2382             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2383             if (rc != NO_ERROR) {
2384                 LOGW("processEvt led mode override failed");
2385                 free(payload);
2386                 payload = NULL;
2387             }
2388         } else {
2389             LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
2390         }
2391     }
2392 
2393     cam_edge_application_t edge_application;
2394     memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
2395     edge_application.sharpness = pme->mParameters.getSharpness();
2396     if (edge_application.sharpness != 0) {
2397         edge_application.edge_mode = CAM_EDGE_MODE_FAST;
2398     } else {
2399         edge_application.edge_mode = CAM_EDGE_MODE_OFF;
2400     }
2401     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
2402 
2403     IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
2404             CAM_INTF_META_FOCUS_POSITION, pMetaData) {
2405         qcamera_sm_internal_evt_payload_t *payload =
2406             (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2407         if (NULL != payload) {
2408             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2409             payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
2410             payload->focus_pos = *cur_pos_info;
2411             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2412             if (rc != NO_ERROR) {
2413                 LOGW("processEvt focus_pos_update failed");
2414                 free(payload);
2415                 payload = NULL;
2416             }
2417         } else {
2418             LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
2419         }
2420     }
2421 
2422     if (pme->mParameters.getLowLightCapture()) {
2423         IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
2424                 CAM_INTF_META_LOW_LIGHT, pMetaData) {
2425             pme->mParameters.setLowLightLevel(*low_light_level);
2426         }
2427     }
2428 
2429     IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
2430             CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
2431         pme->mParameters.setDynamicImgData(*dyn_img_data);
2432     }
2433 
2434     IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
2435       LOGD("touch_ae_status: %d", *touch_ae_status);
2436     }
2437 
2438     stream->bufDone(frame->buf_idx);
2439     free(super_frame);
2440 
2441     LOGD("[KPI Perf] : END");
2442 }
2443 
2444 /*===========================================================================
2445  * FUNCTION   : reprocess_stream_cb_routine
2446  *
2447  * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
2448  *              (after reprocess, e.g., ZSL snapshot frame after WNR if
2449  *              WNR is enabled)
2450  *
2451  * PARAMETERS :
2452  *   @super_frame : received super buffer
2453  *   @stream      : stream object
2454  *   @userdata    : user data ptr
2455  *
2456  * RETURN    : None
2457  *
2458  * NOTE      : caller passes the ownership of super_frame, it's our
2459  *             responsibility to free super_frame once it's done. In this
2460  *             case, reprocessed frame need to be passed to postprocessor
2461  *             for jpeg encoding.
2462  *==========================================================================*/
2463 void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2464                                                             QCameraStream * /*stream*/,
2465                                                             void * userdata)
2466 {
2467     ATRACE_CALL();
2468     LOGH("[KPI Perf]: E");
2469     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2470     if (pme == NULL ||
2471         pme->mCameraHandle == NULL ||
2472         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
2473         LOGE("camera obj not valid");
2474         // simply free super frame
2475         free(super_frame);
2476         return;
2477     }
2478 
2479     pme->m_postprocessor.processPPData(super_frame);
2480 
2481     LOGH("[KPI Perf]: X");
2482 }
2483 
2484 /*===========================================================================
2485  * FUNCTION   : callback_stream_cb_routine
2486  *
2487  * DESCRIPTION: function to process CALLBACK stream data.
2488  *              The frame will be processed and sent to the framework.
2489  *
2490  * PARAMETERS :
2491  *   @super_frame : received super buffer
2492  *   @stream      : stream object
2493  *   @userdata    : user data ptr
2494  *
2495  * RETURN    : None
2496  *==========================================================================*/
2497 void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
2498         QCameraStream *stream, void *userdata)
2499 {
2500     ATRACE_CALL();
2501     LOGH("[KPI Perf]: E");
2502     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2503 
2504     if (pme == NULL ||
2505             pme->mCameraHandle == NULL ||
2506             pme->mCameraHandle->camera_handle != super_frame->camera_handle) {
2507         LOGE("camera obj not valid");
2508         // simply free super frame
2509         free(super_frame);
2510         return;
2511     }
2512 
2513     mm_camera_buf_def_t *frame = super_frame->bufs[0];
2514     if (NULL == frame) {
2515         LOGE("preview callback frame is NULL");
2516         free(super_frame);
2517         return;
2518     }
2519 
2520     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
2521         LOGH("preview is not running, no need to process");
2522         stream->bufDone(frame->buf_idx);
2523         free(super_frame);
2524         return;
2525     }
2526 
2527     QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
2528     // Handle preview data callback
2529     if (pme->mDataCb != NULL &&
2530             (pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) &&
2531             (!pme->mParameters.isSceneSelectionEnabled())) {
2532         int32_t rc = pme->sendPreviewCallback(stream, previewMemObj, frame->buf_idx);
2533         if (NO_ERROR != rc) {
2534             LOGE("Preview callback was not sent successfully");
2535         }
2536     }
2537     stream->bufDone(frame->buf_idx);
2538     free(super_frame);
2539     LOGH("[KPI Perf]: X");
2540 }
2541 
2542 /*===========================================================================
2543  * FUNCTION   : dumpJpegToFile
2544  *
2545  * DESCRIPTION: helper function to dump a JPEG image into a file for debug purposes.
2546  *
2547  * PARAMETERS :
2548  *    @data : data ptr
2549  *    @size : length of data buffer
2550  *    @index : identifier for data
2551  *
2552  * RETURN     : None
2553  *==========================================================================*/
2554 void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
2555         size_t size, uint32_t index)
2556 {
2557     char value[PROPERTY_VALUE_MAX];
2558     property_get("persist.camera.dumpimg", value, "0");
2559     uint32_t enabled = (uint32_t) atoi(value);
2560     uint32_t frm_num = 0;
2561     uint32_t skip_mode = 0;
2562 
2563     char buf[32];
2564     cam_dimension_t dim;
2565     memset(buf, 0, sizeof(buf));
2566     memset(&dim, 0, sizeof(dim));
2567 
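    // persist.camera.dumpimg layout, as parsed below:
    //   bits [31:16] - number of frames to dump (0 selects the default of 10,
    //                  capped at 256; 256 makes the dump counter cycle around)
    //   bits [15:8]  - skip mode, dump every Nth frame (0 means no skipping)
    //   bits [7:0]   - QCAMERA_DUMP_FRM_* type mask (assumed to occupy the low byte)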
2568     if(((enabled & QCAMERA_DUMP_FRM_JPEG) && data) ||
2569         ((true == m_bIntJpegEvtPending) && data)) {
2570         frm_num = ((enabled & 0xffff0000) >> 16);
2571         if(frm_num == 0) {
2572             frm_num = 10; //default 10 frames
2573         }
2574         if(frm_num > 256) {
2575             frm_num = 256; //256 buffers cycle around
2576         }
2577         skip_mode = ((enabled & 0x0000ff00) >> 8);
2578         if(skip_mode == 0) {
2579             skip_mode = 1; //no-skip
2580         }
2581 
2582         if( mDumpSkipCnt % skip_mode == 0) {
2583             if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
2584                 // reset frame count if cycling
2585                 mDumpFrmCnt = 0;
2586             }
2587             if (mDumpFrmCnt <= frm_num) {
2588                 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg",
2589                         mDumpFrmCnt, index);
2590                 if (true == m_bIntJpegEvtPending) {
2591                     strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH);
2592                     mBackendFileSize = size;
2593                 }
2594 
2595                 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
2596                 if (file_fd >= 0) {
2597                     ssize_t written_len = write(file_fd, data, size);
2598                     fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
2599                     LOGH("written number of bytes %zd\n",
2600                              written_len);
2601                     close(file_fd);
2602                 } else {
2603                     LOGE("fail to open file for image dumping");
2604                 }
2605                 if (false == m_bIntJpegEvtPending) {
2606                     mDumpFrmCnt++;
2607                 }
2608             }
2609         }
2610         mDumpSkipCnt++;
2611     }
2612 }
2613 
2614 
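/*===========================================================================
 * FUNCTION   : dumpMetadataToFile
 *
 * DESCRIPTION: helper function to dump tuning metadata into a file for debug
 *              purpose, gated by the persist.camera.dumpmetadata property.
 *
 * PARAMETERS :
 *    @stream : stream object the metadata frame belongs to
 *    @frame  : metadata frame buffer
 *    @type   : string identifying the source, used in the dump file name
 *
 * RETURN     : None
 *==========================================================================*/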
2615 void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
2616                                                    mm_camera_buf_def_t *frame, char *type)
2617 {
2618     char value[PROPERTY_VALUE_MAX];
2619     uint32_t frm_num = 0;
2620     metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer;
2621     property_get("persist.camera.dumpmetadata", value, "0");
2622     uint32_t enabled = (uint32_t) atoi(value);
2623     if (stream == NULL) {
2624         LOGH("No op");
2625         return;
2626     }
2627 
2628     uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
2629     if(enabled){
2630         frm_num = ((enabled & 0xffff0000) >> 16);
2631         if (frm_num == 0) {
2632             frm_num = 10; //default 10 frames
2633         }
2634         if (frm_num > 256) {
2635             frm_num = 256; //256 buffers cycle around
2636         }
2637         if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2638             // reset frame count if cycling
2639             dumpFrmCnt = 0;
2640         }
2641         LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
2642         if (dumpFrmCnt < frm_num) {
2643             char timeBuf[128];
2644             char buf[32];
2645             memset(buf, 0, sizeof(buf));
2646             memset(timeBuf, 0, sizeof(timeBuf));
2647             time_t current_time;
2648             struct tm * timeinfo;
2649             time (&current_time);
2650             timeinfo = localtime (&current_time);
2651             if (NULL != timeinfo) {
2652                 strftime(timeBuf, sizeof(timeBuf),
2653                         QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2654             }
2655             String8 filePath(timeBuf);
2656             snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
2657             filePath.append(buf);
2658             int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2659             if (file_fd >= 0) {
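                // Dump layout: tuning_data_version, then the sensor/VFE/CPP/
                // CAC/CAC2 section sizes (one uint32 each), followed by the
                // sensor, VFE, CPP and CAC blobs copied from their fixed
                // offsets inside tuning_params.data.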
2660                 ssize_t written_len = 0;
2661                 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
2662                 void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
2663                 written_len += write(file_fd, data, sizeof(uint32_t));
2664                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
2665                 LOGH("tuning_sensor_data_size %d",(int)(*(int *)data));
2666                 written_len += write(file_fd, data, sizeof(uint32_t));
2667                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
2668                 LOGH("tuning_vfe_data_size %d",(int)(*(int *)data));
2669                 written_len += write(file_fd, data, sizeof(uint32_t));
2670                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
2671                 LOGH("tuning_cpp_data_size %d",(int)(*(int *)data));
2672                 written_len += write(file_fd, data, sizeof(uint32_t));
2673                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
2674                 LOGH("tuning_cac_data_size %d",(int)(*(int *)data));
2675                 written_len += write(file_fd, data, sizeof(uint32_t));
2676                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2);
2677                 LOGH("tuning_cac_data_size2 %d",(int)(*(int *)data));
2678                 written_len += write(file_fd, data, sizeof(uint32_t));
2679                 size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
2680                 data = (void *)((uint8_t *)&metadata->tuning_params.data);
2681                 written_len += write(file_fd, data, total_size);
2682                 total_size = metadata->tuning_params.tuning_vfe_data_size;
2683                 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
2684                 written_len += write(file_fd, data, total_size);
2685                 total_size = metadata->tuning_params.tuning_cpp_data_size;
2686                 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
2687                 written_len += write(file_fd, data, total_size);
2688                 total_size = metadata->tuning_params.tuning_cac_data_size;
2689                 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
2690                 written_len += write(file_fd, data, total_size);
2691                 close(file_fd);
2692             } else {
2693                 LOGE("fail to open file for metadata dumping");
2694             }
2695             dumpFrmCnt++;
2696         }
2697     }
2698     stream->mDumpMetaFrame = dumpFrmCnt;
2699 }
2700 /*===========================================================================
2701  * FUNCTION   : dumpFrameToFile
2702  *
2703  * DESCRIPTION: helper function to dump frame into file for debug purpose.
2704  *
2705  * PARAMETERS :
2706  *    @stream : stream object the frame belongs to
2707  *    @frame : frame buffer to be dumped
2708  *    @dump_type : type of the frame to be dumped. The frame is
2709  *                 dumped into a file only when this dump type is
2710  *                 enabled in persist.camera.dumpimg.
2711  *    @misc : optional string appended to the dump file name
2712  *
2713  * RETURN     : None
2714  *==========================================================================*/
2715 void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
2716         mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc)
2717 {
2718     char value[PROPERTY_VALUE_MAX];
2719     property_get("persist.camera.dumpimg", value, "0");
2720     uint32_t enabled = (uint32_t) atoi(value);
2721     uint32_t frm_num = 0;
2722     uint32_t skip_mode = 0;
2723 
2724     if (NULL == stream) {
2725         LOGE("stream object is null");
2726         return;
2727     }
2728 
2729     uint32_t dumpFrmCnt = stream->mDumpFrame;
2730 
2731     if (true == m_bIntRawEvtPending) {
2732         enabled = QCAMERA_DUMP_FRM_RAW;
2733     }
2734 
2735     if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) {
2736         if((enabled & dump_type) && stream && frame) {
2737             frm_num = ((enabled & 0xffff0000) >> 16);
2738             if(frm_num == 0) {
2739                 frm_num = 10; //default 10 frames
2740             }
2741             if(frm_num > 256) {
2742                 frm_num = 256; //256 buffers cycle around
2743             }
2744             skip_mode = ((enabled & 0x0000ff00) >> 8);
2745             if(skip_mode == 0) {
2746                 skip_mode = 1; //no-skip
2747             }
2748             if(stream->mDumpSkipCnt == 0)
2749                 stream->mDumpSkipCnt = 1;
2750 
2751             if( stream->mDumpSkipCnt % skip_mode == 0) {
2752                 if((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2753                     // reset frame count if cycling
2754                     dumpFrmCnt = 0;
2755                 }
2756                 if (dumpFrmCnt <= frm_num) {
2757                     char buf[32];
2758                     char timeBuf[128];
2759                     time_t current_time;
2760                     struct tm * timeinfo;
2761 
2762                     memset(timeBuf, 0, sizeof(timeBuf));
2763 
2764                     time (&current_time);
2765                     timeinfo = localtime (&current_time);
2766                     memset(buf, 0, sizeof(buf));
2767 
2768                     cam_dimension_t dim;
2769                     memset(&dim, 0, sizeof(dim));
2770                     stream->getFrameDimension(dim);
2771 
2772                     cam_frame_len_offset_t offset;
2773                     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2774                     stream->getFrameOffset(offset);
2775 
2776                     if (NULL != timeinfo) {
2777                         strftime(timeBuf, sizeof(timeBuf),
2778                                 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2779                     }
2780                     String8 filePath(timeBuf);
2781                     switch (dump_type) {
2782                     case QCAMERA_DUMP_FRM_PREVIEW:
2783                         {
2784                             snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
2785                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2786                         }
2787                         break;
2788                     case QCAMERA_DUMP_FRM_THUMBNAIL:
2789                         {
2790                             snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
2791                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2792                         }
2793                         break;
2794                     case QCAMERA_DUMP_FRM_SNAPSHOT:
2795                         {
2796                             if (!mParameters.isPostProcScaling()) {
2797                                 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2798                             } else {
2799                                 stream->getFrameDimension(dim);
2800                             }
2801                             if (misc != NULL) {
2802                                 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv",
2803                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2804                             } else {
2805                                 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
2806                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2807                             }
2808                         }
2809                         break;
2810                     case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
2811                         {
2812                             stream->getFrameDimension(dim);
2813                             if (misc != NULL) {
2814                                 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv",
2815                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2816                             } else {
2817                                 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv",
2818                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2819                             }
2820                         }
2821                         break;
2822                     case QCAMERA_DUMP_FRM_VIDEO:
2823                         {
2824                             snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
2825                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2826                         }
2827                         break;
2828                     case QCAMERA_DUMP_FRM_RAW:
2829                         {
2830                             mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
2831                             snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
2832                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2833                         }
2834                         break;
2835                     case QCAMERA_DUMP_FRM_JPEG:
2836                         {
2837                             mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2838                             snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv",
2839                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2840                         }
2841                         break;
2842                     default:
2843                         LOGE("Not supported for dumping stream type %d",
2844                                dump_type);
2845                         return;
2846                     }
2847 
2848                     filePath.append(buf);
2849                     int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2850                     ssize_t written_len = 0;
2851                     if (file_fd >= 0) {
2852                         void *data = NULL;
2853 
2854                         fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
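                        // Write each plane scanline by scanline: only 'width'
                        // bytes per row are dumped while the read offset
                        // advances by 'stride', so padding is dropped; a
                        // plane's meta_len header, when present, goes first.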
2855                         for (uint32_t i = 0; i < offset.num_planes; i++) {
2856                             uint32_t index = offset.mp[i].offset;
2857                             if (i > 0) {
2858                                 index += offset.mp[i-1].len;
2859                             }
2860 
2861                             if (offset.mp[i].meta_len != 0) {
2862                                 data = (void *)((uint8_t *)frame->buffer + index);
2863                                 written_len += write(file_fd, data,
2864                                         (size_t)offset.mp[i].meta_len);
2865                                 index += (uint32_t)offset.mp[i].meta_len;
2866                             }
2867 
2868                             for (int j = 0; j < offset.mp[i].height; j++) {
2869                                 data = (void *)((uint8_t *)frame->buffer + index);
2870                                 written_len += write(file_fd, data,
2871                                         (size_t)offset.mp[i].width);
2872                                 index += (uint32_t)offset.mp[i].stride;
2873                             }
2874                         }
2875 
2876                         LOGH("written number of bytes %zd\n",
2877                              written_len);
2878                         close(file_fd);
2879                     } else {
2880                         LOGE("fail to open file for image dumping");
2881                     }
2882                     if (true == m_bIntRawEvtPending) {
2883                         strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH);
2884                         mBackendFileSize = (size_t)written_len;
2885                     } else {
2886                         dumpFrmCnt++;
2887                     }
2888                 }
2889             }
2890             stream->mDumpSkipCnt++;
2891         }
2892     } else {
2893         dumpFrmCnt = 0;
2894     }
2895     stream->mDumpFrame = dumpFrmCnt;
2896 }
2897 
2898 /*===========================================================================
2899  * FUNCTION   : debugShowVideoFPS
2900  *
2901  * DESCRIPTION: helper function to log video frame FPS for debug purpose.
2902  *
2903  * PARAMETERS : None
2904  *
2905  * RETURN     : None
2906  *==========================================================================*/
2907 void QCamera2HardwareInterface::debugShowVideoFPS()
2908 {
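    // Log the measured video FPS; the estimate is refreshed at most once
    // every 250 ms from the frame-count delta over the elapsed time.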
2909     mVFrameCount++;
2910     nsecs_t now = systemTime();
2911     nsecs_t diff = now - mVLastFpsTime;
2912     if (diff > ms2ns(250)) {
2913         mVFps = (((double)(mVFrameCount - mVLastFrameCount)) *
2914                 (double)(s2ns(1))) / (double)diff;
2915         LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d",
2916                 mVFps, mCameraId);
2917         mVLastFpsTime = now;
2918         mVLastFrameCount = mVFrameCount;
2919     }
2920 }
2921 
2922 /*===========================================================================
2923  * FUNCTION   : debugShowPreviewFPS
2924  *
2925  * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
2926  *
2927  * PARAMETERS : None
2928  *
2929  * RETURN     : None
2930  *==========================================================================*/
2931 void QCamera2HardwareInterface::debugShowPreviewFPS()
2932 {
2933     mPFrameCount++;
2934     nsecs_t now = systemTime();
2935     nsecs_t diff = now - mPLastFpsTime;
2936     if (diff > ms2ns(250)) {
2937         mPFps = (((double)(mPFrameCount - mPLastFrameCount)) *
2938                 (double)(s2ns(1))) / (double)diff;
2939         LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d",
2940                  mPFps, mCameraId);
2941         mPLastFpsTime = now;
2942         mPLastFrameCount = mPFrameCount;
2943     }
2944 }
2945 
2946 /*===========================================================================
2947  * FUNCTION   : fillFacesData
2948  *
2949  * DESCRIPTION: helper function to fill in face related metadata into a struct.
2950  *
2951  * PARAMETERS :
2952  *   @faces_data : face features data to be filled
2953  *   @metadata   : metadata structure to read face features from
2954  *
2955  * RETURN     : None
2956  *==========================================================================*/
2957 void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
2958         metadata_buffer_t *metadata)
2959 {
2960     memset(&faces_data, 0, sizeof(cam_faces_data_t));
2961 
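    // IF_META_AVAILABLE (assumed to be defined in the camera metadata headers)
    // declares a typed pointer to the requested tag and executes the block
    // only when that tag is valid in 'metadata'.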
2962     IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
2963             CAM_INTF_META_FACE_DETECTION, metadata) {
2964         faces_data.detection_data = *p_detection_data;
2965         if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
2966             faces_data.detection_data.num_faces_detected = MAX_ROI;
2967         }
2968 
2969         LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
2970                 faces_data.detection_data.num_faces_detected);
2971 
2972         IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
2973                 CAM_INTF_META_FACE_RECOG, metadata) {
2974             faces_data.recog_valid = true;
2975             faces_data.recog_data = *p_recog_data;
2976         }
2977 
2978         IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
2979                 CAM_INTF_META_FACE_BLINK, metadata) {
2980             faces_data.blink_valid = true;
2981             faces_data.blink_data = *p_blink_data;
2982         }
2983 
2984         IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
2985                 CAM_INTF_META_FACE_GAZE, metadata) {
2986             faces_data.gaze_valid = true;
2987             faces_data.gaze_data = *p_gaze_data;
2988         }
2989 
2990         IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
2991                 CAM_INTF_META_FACE_SMILE, metadata) {
2992             faces_data.smile_valid = true;
2993             faces_data.smile_data = *p_smile_data;
2994         }
2995 
2996         IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
2997                 CAM_INTF_META_FACE_LANDMARK, metadata) {
2998             faces_data.landmark_valid = true;
2999             faces_data.landmark_data = *p_landmarks;
3000         }
3001 
3002         IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
3003                 CAM_INTF_META_FACE_CONTOUR, metadata) {
3004             faces_data.contour_valid = true;
3005             faces_data.contour_data = *p_contour;
3006         }
3007     }
3008 }
3009 
3010 /*===========================================================================
3011  * FUNCTION   : ~QCameraCbNotifier
3012  *
3013  * DESCRIPTION: Destructor for exiting the callback context.
3014  *
3015  * PARAMETERS : None
3016  *
3017  * RETURN     : None
3018  *==========================================================================*/
3019 QCameraCbNotifier::~QCameraCbNotifier()
3020 {
3021 }
3022 
3023 /*===========================================================================
3024  * FUNCTION   : exit
3025  *
3026  * DESCRIPTION: exit notify thread.
3027  *
3028  * PARAMETERS : None
3029  *
3030  * RETURN     : None
3031  *==========================================================================*/
3032 void QCameraCbNotifier::exit()
3033 {
3034     mActive = false;
3035     mProcTh.exit();
3036 }
3037 
3038 /*===========================================================================
3039  * FUNCTION   : releaseNotifications
3040  *
3041  * DESCRIPTION: callback for releasing data stored in the callback queue.
3042  *
3043  * PARAMETERS :
3044  *   @data      : data to be released
3045  *   @user_data : context data
3046  *
3047  * RETURN     : None
3048  *==========================================================================*/
3049 void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
3050 {
3051     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3052 
3053     if ( ( NULL != arg ) && ( NULL != user_data ) ) {
3054         if ( arg->release_cb ) {
3055             arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION);
3056         }
3057     }
3058 }
3059 
3060 /*===========================================================================
3061  * FUNCTION   : matchSnapshotNotifications
3062  *
3063  * DESCRIPTION: matches snapshot data callbacks
3064  *
3065  * PARAMETERS :
3066  *   @data      : data to match
3067  *   @user_data : context data
3068  *
3069  * RETURN     : bool match
3070  *              true - match found
3071  *              false- match not found
3072  *==========================================================================*/
3073 bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
3074                                                    void */*user_data*/)
3075 {
3076     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3077     if ( NULL != arg ) {
3078         if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
3079             return true;
3080         }
3081     }
3082 
3083     return false;
3084 }
3085 
3086 /*===========================================================================
3087  * FUNCTION   : matchPreviewNotifications
3088  *
3089  * DESCRIPTION: matches preview data callbacks
3090  *
3091  * PARAMETERS :
3092  *   @data      : data to match
3093  *   @user_data : context data
3094  *
3095  * RETURN     : bool match
3096  *              true - match found
3097  *              false- match not found
3098  *==========================================================================*/
3099 bool QCameraCbNotifier::matchPreviewNotifications(void *data,
3100         void */*user_data*/)
3101 {
3102     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3103     if (NULL != arg) {
3104         if ((QCAMERA_DATA_CALLBACK == arg->cb_type) &&
3105                 (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) {
3106             return true;
3107         }
3108     }
3109 
3110     return false;
3111 }
3112 
3113 /*===========================================================================
3114  * FUNCTION   : matchTimestampNotifications
3115  *
3116  * DESCRIPTION: matches timestamp data callbacks
3117  *
3118  * PARAMETERS :
3119  *   @data      : data to match
3120  *   @user_data : context data
3121  *
3122  * RETURN     : bool match
3123  *              true - match found
3124  *              false- match not found
3125  *==========================================================================*/
3126 bool QCameraCbNotifier::matchTimestampNotifications(void *data,
3127         void */*user_data*/)
3128 {
3129     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3130     if (NULL != arg) {
3131         if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) &&
3132                 (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) {
3133             return true;
3134         }
3135     }
3136 
3137     return false;
3138 }
3139 
3140 /*===========================================================================
3141  * FUNCTION   : cbNotifyRoutine
3142  *
3143  * DESCRIPTION: main routine of the callback thread; dispatches queued
3144  *              notifications to the upper layers based on incoming commands.
3145  *
3146  * PARAMETERS :
3147  *   @data    : context data
3148  *
3149  * RETURN     : None
3150  *==========================================================================*/
3151 void * QCameraCbNotifier::cbNotifyRoutine(void * data)
3152 {
3153     int running = 1;
3154     int ret;
3155     QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
3156     QCameraCmdThread *cmdThread = &pme->mProcTh;
3157     cmdThread->setName("CAM_cbNotify");
3158     uint8_t isSnapshotActive = FALSE;
3159     bool longShotEnabled = false;
3160     uint32_t numOfSnapshotExpected = 0;
3161     uint32_t numOfSnapshotRcvd = 0;
3162     int32_t cbStatus = NO_ERROR;
3163 
3164     LOGD("E");
3165     do {
3166         do {
3167             ret = cam_sem_wait(&cmdThread->cmd_sem);
3168             if (ret != 0 && errno != EINVAL) {
3169                 LOGD("cam_sem_wait error (%s)",
3170                             strerror(errno));
3171                 return NULL;
3172             }
3173         } while (ret != 0);
3174 
3175         camera_cmd_type_t cmd = cmdThread->getCmd();
3176         LOGD("get cmd %d", cmd);
3177         switch (cmd) {
3178         case CAMERA_CMD_TYPE_START_DATA_PROC:
3179             {
3180                 isSnapshotActive = TRUE;
3181                 numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
3182                 longShotEnabled = pme->mParent->isLongshotEnabled();
3183                 LOGD("Num Snapshots Expected = %d",
3184                        numOfSnapshotExpected);
3185                 numOfSnapshotRcvd = 0;
3186             }
3187             break;
3188         case CAMERA_CMD_TYPE_STOP_DATA_PROC:
3189             {
3190                 pme->mDataQ.flushNodes(matchSnapshotNotifications);
3191                 isSnapshotActive = FALSE;
3192 
3193                 numOfSnapshotExpected = 0;
3194                 numOfSnapshotRcvd = 0;
3195             }
3196             break;
3197         case CAMERA_CMD_TYPE_DO_NEXT_JOB:
3198             {
3199                 qcamera_callback_argm_t *cb =
3200                     (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
3201                 cbStatus = NO_ERROR;
3202                 if (NULL != cb) {
3203                     LOGD("cb type %d received",
3204                               cb->cb_type);
3205 
3206                     if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
3207                         switch (cb->cb_type) {
3208                         case QCAMERA_NOTIFY_CALLBACK:
3209                             {
3210                                 if (cb->msg_type == CAMERA_MSG_FOCUS) {
3211                                     KPI_ATRACE_INT("Camera:AutoFocus", 0);
3212                                     LOGH("[KPI Perf] : PROFILE_SENDING_FOCUS_EVT_TO APP");
3213                                 }
3214                                 if (pme->mNotifyCb) {
3215                                     pme->mNotifyCb(cb->msg_type,
3216                                                   cb->ext1,
3217                                                   cb->ext2,
3218                                                   pme->mCallbackCookie);
3219                                 } else {
3220                                     LOGW("notify callback not set!");
3221                                 }
3222                                 if (cb->release_cb) {
3223                                     cb->release_cb(cb->user_data, cb->cookie,
3224                                             cbStatus);
3225                                 }
3226                             }
3227                             break;
3228                         case QCAMERA_DATA_CALLBACK:
3229                             {
3230                                 if (pme->mDataCb) {
3231                                     pme->mDataCb(cb->msg_type,
3232                                                  cb->data,
3233                                                  cb->index,
3234                                                  cb->metadata,
3235                                                  pme->mCallbackCookie);
3236                                 } else {
3237                                     LOGW("data callback not set!");
3238                                 }
3239                                 if (cb->release_cb) {
3240                                     cb->release_cb(cb->user_data, cb->cookie,
3241                                             cbStatus);
3242                                 }
3243                             }
3244                             break;
3245                         case QCAMERA_DATA_TIMESTAMP_CALLBACK:
3246                             {
3247                                 if(pme->mDataCbTimestamp) {
3248                                     pme->mDataCbTimestamp(cb->timestamp,
3249                                                           cb->msg_type,
3250                                                           cb->data,
3251                                                           cb->index,
3252                                                           pme->mCallbackCookie);
3253                                 } else {
3254                                     LOGE("Timestamp data callback not set!");
3255                                 }
3256                                 if (cb->release_cb) {
3257                                     cb->release_cb(cb->user_data, cb->cookie,
3258                                             cbStatus);
3259                                 }
3260                             }
3261                             break;
3262                         case QCAMERA_DATA_SNAPSHOT_CALLBACK:
3263                             {
3264                                 if (TRUE == isSnapshotActive && pme->mDataCb ) {
3265                                     if (!longShotEnabled) {
3266                                         numOfSnapshotRcvd++;
3267                                         LOGI("Num Snapshots Received = %d Expected = %d",
3268                                                 numOfSnapshotRcvd, numOfSnapshotExpected);
3269                                         if (numOfSnapshotExpected > 0 &&
3270                                            (numOfSnapshotExpected == numOfSnapshotRcvd)) {
3271                                             LOGI("Received all snapshots");
3272                                             // notify HWI that snapshot is done
3273                                             pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
3274                                                                          NULL);
3275                                         }
3276                                     }
3277                                     if (pme->mJpegCb) {
3278                                         LOGI("Calling JPEG Callback!! for camera %d "
3279                                                 "release_data %p "
3280                                                 "frame_idx %d",
3281                                                 pme->mParent->getCameraId(),
3282                                                 cb->user_data,
3283                                                 cb->frame_index);
3284                                         pme->mJpegCb(cb->msg_type, cb->data,
3285                                                 cb->index, cb->metadata,
3286                                                 pme->mJpegCallbackCookie,
3287                                                 cb->frame_index, cb->release_cb,
3288                                                 cb->cookie, cb->user_data);
3289                                         // In case of a non-NULL JPEG callback we
3290                                         // transfer ownership of the buffer to the
3291                                         // muxer, so release_cb must not be called
3292                                         // here; the muxer releases it once done
3293                                         // processing the buffer.
3294                                     } else if(pme->mDataCb){
3295                                         pme->mDataCb(cb->msg_type, cb->data, cb->index,
3296                                                 cb->metadata, pme->mCallbackCookie);
3297                                         if (cb->release_cb) {
3298                                             cb->release_cb(cb->user_data, cb->cookie,
3299                                                     cbStatus);
3300                                         }
3301                                     }
3302                                 }
3303                             }
3304                             break;
3305                         default:
3306                             {
3307                                 LOGE("invalid cb type %d",
3308                                           cb->cb_type);
3309                                 cbStatus = BAD_VALUE;
3310                                 if (cb->release_cb) {
3311                                     cb->release_cb(cb->user_data, cb->cookie,
3312                                             cbStatus);
3313                                 }
3314                             }
3315                             break;
3316                         };
3317                     } else {
3318                         LOGW("cb message type %d not enabled!",
3319                                   cb->msg_type);
3320                         cbStatus = INVALID_OPERATION;
3321                         if (cb->release_cb) {
3322                             cb->release_cb(cb->user_data, cb->cookie, cbStatus);
3323                         }
3324                     }
3325                     delete cb;
3326                 } else {
3327                     LOGW("invalid cb type passed");
3328                 }
3329             }
3330             break;
3331         case CAMERA_CMD_TYPE_EXIT:
3332             {
3333                 running = 0;
3334                 pme->mDataQ.flush();
3335             }
3336             break;
3337         default:
3338             break;
3339         }
3340     } while (running);
3341     LOGD("X");
3342 
3343     return NULL;
3344 }
3345 
3346 /*===========================================================================
3347  * FUNCTION   : notifyCallback
3348  *
3349  * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
3350  *
3351  * PARAMETERS :
3352  *   @cbArgs  : callback arguments
3353  *
3354  * RETURN     : int32_t type of status
3355  *              NO_ERROR  -- success
3356  *              non-zero failure code
3357  *==========================================================================*/
3358 int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
3359 {
3360     if (!mActive) {
3361         LOGE("notify thread is not active");
3362         return UNKNOWN_ERROR;
3363     }
3364 
3365     qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
3366     if (NULL == cbArg) {
3367         LOGE("no mem for qcamera_callback_argm_t");
3368         return NO_MEMORY;
3369     }
3370     memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
3371     *cbArg = cbArgs;
3372 
3373     if (mDataQ.enqueue((void *)cbArg)) {
3374         return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
3375     } else {
3376         LOGE("Error adding cb data into queue");
3377         delete cbArg;
3378         return UNKNOWN_ERROR;
3379     }
3380 }
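
/* Usage sketch (an assumption, not taken from this file): the HWI is assumed
 * to hold the notifier in a member such as m_cbNotifier; the fields below are
 * the ones dispatched by cbNotifyRoutine() above.
 *
 *     qcamera_callback_argm_t cbArg;
 *     memset(&cbArg, 0, sizeof(cbArg));
 *     cbArg.cb_type  = QCAMERA_NOTIFY_CALLBACK;
 *     cbArg.msg_type = CAMERA_MSG_FOCUS;
 *     cbArg.ext1     = 1;   // e.g. AF result flag forwarded to the app
 *     cbArg.ext2     = 0;
 *     m_cbNotifier.notifyCallback(cbArg);
 */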
3381 
3382 /*===========================================================================
3383  * FUNCTION   : setCallbacks
3384  *
3385  * DESCRIPTION: Initializes the callback functions, which would be used for
3386  *              communication with the upper layers and launches the callback
3387  *              context in which the callbacks will occur.
3388  *
3389  * PARAMETERS :
3390  *   @notifyCb          : notification callback
3391  *   @dataCb            : data callback
3392  *   @dataCbTimestamp   : data with timestamp callback
3393  *   @callbackCookie    : callback context data
3394  *
3395  * RETURN     : None
3396  *==========================================================================*/
3397 void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
3398                                      camera_data_callback dataCb,
3399                                      camera_data_timestamp_callback dataCbTimestamp,
3400                                      void *callbackCookie)
3401 {
3402     if ( ( NULL == mNotifyCb ) &&
3403          ( NULL == mDataCb ) &&
3404          ( NULL == mDataCbTimestamp ) &&
3405          ( NULL == mCallbackCookie ) ) {
3406         mNotifyCb = notifyCb;
3407         mDataCb = dataCb;
3408         mDataCbTimestamp = dataCbTimestamp;
3409         mCallbackCookie = callbackCookie;
3410         mActive = true;
3411         mProcTh.launch(cbNotifyRoutine, this);
3412     } else {
3413         LOGE("Camera callback notifier already initialized!");
3414     }
3415 }
3416 
3417 /*===========================================================================
3418  * FUNCTION   : setJpegCallBacks
3419  *
3420  * DESCRIPTION: Initializes the JPEG callback function, which is used for
3421  *              delivering encoded JPEG frames and their metadata to the
3422  *              upper layers.
3423  *
3424  * PARAMETERS :
3425  *   @jpegCb          : JPEG data callback
3426  *   @callbackCookie    : callback context data
3427  *
3428  * RETURN     : None
3429  *==========================================================================*/
3430 void QCameraCbNotifier::setJpegCallBacks(
3431         jpeg_data_callback jpegCb, void *callbackCookie)
3432 {
3433     LOGH("Setting JPEG Callback notifier");
3434     mJpegCb        = jpegCb;
3435     mJpegCallbackCookie  = callbackCookie;
3436 }
3437 
3438 /*===========================================================================
3439  * FUNCTION   : flushPreviewNotifications
3440  *
3441  * DESCRIPTION: flush all pending preview notifications
3442  *              from the notifier queue
3443  *
3444  * PARAMETERS : None
3445  *
3446  * RETURN     : int32_t type of status
3447  *              NO_ERROR  -- success
3448  *              non-zero failure code
3449  *==========================================================================*/
3450 int32_t QCameraCbNotifier::flushPreviewNotifications()
3451 {
3452     if (!mActive) {
3453         LOGE("notify thread is not active");
3454         return UNKNOWN_ERROR;
3455     }
3456     mDataQ.flushNodes(matchPreviewNotifications);
3457     return NO_ERROR;
3458 }
3459 
3460 /*===========================================================================
3461  * FUNCTION   : flushVideoNotifications
3462  *
3463  * DESCRIPTION: flush all pending video notifications
3464  *              from the notifier queue
3465  *
3466  * PARAMETERS : None
3467  *
3468  * RETURN     : int32_t type of status
3469  *              NO_ERROR  -- success
3470  *              non-zero failure code
3471  *==========================================================================*/
3472 int32_t QCameraCbNotifier::flushVideoNotifications()
3473 {
3474     if (!mActive) {
3475         LOGE("notify thread is not active");
3476         return UNKNOWN_ERROR;
3477     }
3478     mDataQ.flushNodes(matchTimestampNotifications);
3479     return NO_ERROR;
3480 }
3481 
3482 /*===========================================================================
3483  * FUNCTION   : startSnapshots
3484  *
3485  * DESCRIPTION: Enables snapshot mode
3486  *
3487  * PARAMETERS : None
3488  *
3489  * RETURN     : int32_t type of status
3490  *              NO_ERROR  -- success
3491  *              non-zero failure code
3492  *==========================================================================*/
3493 int32_t QCameraCbNotifier::startSnapshots()
3494 {
3495     return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
3496 }
3497 
3498 /*===========================================================================
3499  * FUNCTION   : stopSnapshots
3500  *
3501  * DESCRIPTION: Disables snapshot processing mode
3502  *
3503  * PARAMETERS : None
3504  *
3505  * RETURN     : None
3506  *==========================================================================*/
3507 void QCameraCbNotifier::stopSnapshots()
3508 {
3509     mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
3510 }
3511 
3512 }; // namespace qcamera
3513