1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera2HWI"
31 
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
37 #include STAT_H
38 #include <utils/Errors.h>
39 
40 // OpenMAX dependencies
41 #include "QComOMXMetadata.h"
42 
43 // Camera dependencies
44 #include "QCamera2HWI.h"
45 #include "QCameraTrace.h"
46 
47 extern "C" {
48 #include "mm_camera_dbg.h"
49 }
50 
51 namespace qcamera {
52 
53 /*===========================================================================
54  * FUNCTION   : zsl_channel_cb
55  *
56  * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
57  *              mm-camera-interface
58  *
59  * PARAMETERS :
60  *   @recvd_frame : received super buffer
61  *   @userdata    : user data ptr
62  *
63  * RETURN    : None
64  *
65  * NOTE      : recvd_frame will be released by the caller after this call, so if
66  *             any async operation on recvd_frame is needed, it's our responsibility
67  *             to save a copy of it for later use.
68  *==========================================================================*/
69 void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
70                                                void *userdata)
71 {
72     ATRACE_CALL();
73     LOGH("[KPI Perf]: E");
74     char value[PROPERTY_VALUE_MAX];
75     bool dump_raw = false;
76     bool log_matching = false;
77     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
78     if (pme == NULL ||
79         pme->mCameraHandle == NULL ||
80         pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
81        LOGE("camera obj not valid");
82        return;
83     }
84 
85     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
86     if (pChannel == NULL ||
87         pChannel->getMyHandle() != recvd_frame->ch_id) {
88         LOGE("ZSL channel doesn't exist, return here");
89         return;
90     }
91 
92     if(pme->mParameters.isSceneSelectionEnabled() &&
93             !pme->m_stateMachine.isCaptureRunning()) {
94         pme->selectScene(pChannel, recvd_frame);
95         pChannel->bufDone(recvd_frame);
96         return;
97     }
98 
99     LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
100            recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
101     if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
102         qcamera_sm_internal_evt_payload_t *payload =
103                 (qcamera_sm_internal_evt_payload_t *)malloc(
104                         sizeof(qcamera_sm_internal_evt_payload_t));
105         if (NULL != payload) {
106             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
107             payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
108             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
109             if (rc != NO_ERROR) {
110                 LOGE("processEvt for retro AEC unlock failed");
111                 free(payload);
112                 payload = NULL;
113             }
114         } else {
115             LOGE("No memory for retro AEC event");
116         }
117     }
118 
119     // Check if retro-active frames are completed and camera is
120     // ready to go ahead with LED estimation for regular frames
121     if (recvd_frame->bReadyForPrepareSnapshot) {
122         // Send an event
123         LOGD("Ready for Prepare Snapshot, signal ");
124         qcamera_sm_internal_evt_payload_t *payload =
125                     (qcamera_sm_internal_evt_payload_t *)malloc(
126                     sizeof(qcamera_sm_internal_evt_payload_t));
127         if (NULL != payload) {
128             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
129             payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
130             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
131             if (rc != NO_ERROR) {
132                 LOGW("processEvt Ready for Snapshot failed");
133                 free(payload);
134                 payload = NULL;
135             }
136         } else {
137             LOGE("No memory for prepare signal event detect"
138                     " qcamera_sm_internal_evt_payload_t");
139         }
140     }
141 
142     /* indicate the parent that capture is done */
143     pme->captureDone();
144 
145     // save a copy for the superbuf
146     mm_camera_super_buf_t* frame =
147                (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
148     if (frame == NULL) {
149         LOGE("Error allocating memory to save received_frame structure.");
150         pChannel->bufDone(recvd_frame);
151         return;
152     }
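    // Shallow copy of the superbuf descriptor only; the underlying buffers are
    // still owned by the channel and must be returned later via bufDone().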
153     *frame = *recvd_frame;
154 
155     if (recvd_frame->num_bufs > 0) {
156         LOGI("[KPI Perf]: superbuf frame_idx %d",
157             recvd_frame->bufs[0]->frame_idx);
158     }
159 
160     // DUMP RAW if available
161     property_get("persist.camera.zsl_raw", value, "0");
162     dump_raw = atoi(value) > 0 ? true : false;
163     if (dump_raw) {
164         for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
165             if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
166                 mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
167                 QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
168                 if (NULL != pStream) {
169                     pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
170                 }
171                 break;
172             }
173         }
174     }
175 
176     for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
177         if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
178             mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
179             QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
180             if (NULL != pStream) {
181                 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
182             }
183             break;
184         }
185     }
186     //
187     // Check whether FD metadata is needed along with the snapshot frame in ZSL mode
188     if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
189         //Need Face Detection result for snapshot frames
190         //Get the Meta Data frames
191         mm_camera_buf_def_t *pMetaFrame = NULL;
192         for (uint32_t i = 0; i < frame->num_bufs; i++) {
193             QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
194             if (pStream != NULL) {
195                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
196                     pMetaFrame = frame->bufs[i]; //find the metadata
197                     break;
198                 }
199             }
200         }
201 
202         if(pMetaFrame != NULL){
203             metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
204             //send the face detection info
205             cam_faces_data_t faces_data;
206             pme->fillFacesData(faces_data, pMetaData);
207             // Hard-coded here until MCT can support it
208             faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;
209 
210             qcamera_sm_internal_evt_payload_t *payload =
211                 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
212             if (NULL != payload) {
213                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
214                 payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
215                 payload->faces_data = faces_data;
216                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
217                 if (rc != NO_ERROR) {
218                     LOGW("processEvt face_detection_result failed");
219                     free(payload);
220                     payload = NULL;
221                 }
222             } else {
223                 LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
224             }
225         }
226     }
227 
228     property_get("persist.camera.dumpmetadata", value, "0");
229     int32_t enabled = atoi(value);
230     if (enabled) {
231         mm_camera_buf_def_t *pMetaFrame = NULL;
232         QCameraStream *pStream = NULL;
233         for (uint32_t i = 0; i < frame->num_bufs; i++) {
234             pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
235             if (pStream != NULL) {
236                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
237                     pMetaFrame = frame->bufs[i];
238                     if (pMetaFrame != NULL &&
239                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
240                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
241                     }
242                     break;
243                 }
244             }
245         }
246     }
247 
248     property_get("persist.camera.zsl_matching", value, "0");
249     log_matching = atoi(value) > 0 ? true : false;
250     if (log_matching) {
251         LOGH("ZSL super buffer contains:");
252         QCameraStream *pStream = NULL;
253         for (uint32_t i = 0; i < frame->num_bufs; i++) {
254             pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
255             if (pStream != NULL ) {
256                 LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
257                         frame->bufs[i]->buf_idx,
258                         frame->bufs[i]->frame_idx,
259                         pStream->getMyType(),
260                         frame->bufs[i]->ts.tv_sec,
261                         frame->bufs[i]->ts.tv_nsec);
262             }
263         }
264     }
265 
266     // Wait on Postproc initialization if needed
267     // then send to postprocessor
268     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
269             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
270         LOGE("Failed to trigger process data");
271         pChannel->bufDone(recvd_frame);
272         free(frame);
273         frame = NULL;
274         return;
275     }
276 
277     LOGH("[KPI Perf]: X");
278 }
279 
280 /*===========================================================================
281  * FUNCTION   : selectScene
282  *
283  * DESCRIPTION: send a preview callback when a specific selected scene is applied
284  *
285  * PARAMETERS :
286  *   @pChannel: Camera channel
287  *   @frame   : Bundled super buffer
288  *
289  * RETURN     : int32_t type of status
290  *              NO_ERROR  -- success
291  *              non-zero failure code
292  *==========================================================================*/
293 int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
294         mm_camera_super_buf_t *frame)
295 {
296     mm_camera_buf_def_t *pMetaFrame = NULL;
297     QCameraStream *pStream = NULL;
298     int32_t rc = NO_ERROR;
299 
300     if ((NULL == frame) || (NULL == pChannel)) {
301         LOGE("Invalid scene select input");
302         return BAD_VALUE;
303     }
304 
305     cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
306     if (CAM_SCENE_MODE_MAX == selectedScene) {
307         LOGL("No selected scene");
308         return NO_ERROR;
309     }
310 
311     for (uint32_t i = 0; i < frame->num_bufs; i++) {
312         pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
313         if (pStream != NULL) {
314             if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
315                 pMetaFrame = frame->bufs[i];
316                 break;
317             }
318         }
319     }
320 
321     if (NULL == pMetaFrame) {
322         LOGE("No metadata buffer found in scene select super buffer");
323         return NO_INIT;
324     }
325 
326     metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
327 
328     IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
329         if ((*scene == selectedScene) &&
330                 (mDataCb != NULL) &&
331                 (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
332             mm_camera_buf_def_t *preview_frame = NULL;
333             for (uint32_t i = 0; i < frame->num_bufs; i++) {
334                 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
335                 if (pStream != NULL) {
336                     if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
337                         preview_frame = frame->bufs[i];
338                         break;
339                     }
340                 }
341             }
342             if (preview_frame) {
343                 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
344                 uint32_t idx = preview_frame->buf_idx;
345                 rc = sendPreviewCallback(pStream, memory, idx);
346                 if (NO_ERROR != rc) {
347                     LOGE("Error triggering scene select preview callback");
348                 } else {
349                     mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
350                 }
351             } else {
352                 LOGE("No preview buffer found in scene select super buffer");
353                 return NO_INIT;
354             }
355         }
356     } else {
357         LOGE("No current scene metadata!");
358         rc = NO_INIT;
359     }
360 
361     return rc;
362 }
363 
364 /*===========================================================================
365  * FUNCTION   : capture_channel_cb_routine
366  *
367  * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
368  *              mm-camera-interface
369  *
370  * PARAMETERS :
371  *   @recvd_frame : received super buffer
372  *   @userdata    : user data ptr
373  *
374  * RETURN    : None
375  *
376  * NOTE      : recvd_frame will be released by the caller after this call, so if
377  *             any async operation on recvd_frame is needed, it's our responsibility
378  *             to save a copy of it for later use.
379 *==========================================================================*/
380 void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
381                                                            void *userdata)
382 {
383     KPI_ATRACE_CALL();
384     char value[PROPERTY_VALUE_MAX];
385     LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
386     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
387     if (pme == NULL ||
388         pme->mCameraHandle == NULL ||
389         pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
390         LOGE("camera obj not valid");
391         return;
392     }
393 
394     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
395     if (pChannel == NULL ||
396         pChannel->getMyHandle() != recvd_frame->ch_id) {
397         LOGE("Capture channel doesn't exist, return here");
398         return;
399     }
400 
401     // save a copy for the superbuf
402     mm_camera_super_buf_t* frame =
403                (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
404     if (frame == NULL) {
405         LOGE("Error allocating memory to save received_frame structure.");
406         pChannel->bufDone(recvd_frame);
407         return;
408     }
409     *frame = *recvd_frame;
410 
411     if (recvd_frame->num_bufs > 0) {
412         LOGI("[KPI Perf]: superbuf frame_idx %d",
413                 recvd_frame->bufs[0]->frame_idx);
414     }
415 
416     for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) {
417         if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) {
418             mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
419             QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
420             if ( NULL != pStream ) {
421                 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
422             }
423             break;
424         }
425     }
426 
427     property_get("persist.camera.dumpmetadata", value, "0");
428     int32_t enabled = atoi(value);
429     if (enabled) {
430         mm_camera_buf_def_t *pMetaFrame = NULL;
431         QCameraStream *pStream = NULL;
432         for (uint32_t i = 0; i < frame->num_bufs; i++) {
433             pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
434             if (pStream != NULL) {
435                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
436                     pMetaFrame = frame->bufs[i]; //find the metadata
437                     if (pMetaFrame != NULL &&
438                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
439                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
440                     }
441                     break;
442                 }
443             }
444         }
445     }
446 
447     // Wait on Postproc initialization if needed
448     // then send to postprocessor
449     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
450             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
451         LOGE("Failed to trigger process data");
452         pChannel->bufDone(recvd_frame);
453         free(frame);
454         frame = NULL;
455         return;
456     }
457 
458 /* START of test register face image for face authentication */
459 #ifdef QCOM_TEST_FACE_REGISTER_FACE
460     static uint8_t bRunFaceReg = 1;
461 
462     if (bRunFaceReg > 0) {
463         // find snapshot frame
464         QCameraStream *main_stream = NULL;
465         mm_camera_buf_def_t *main_frame = NULL;
466         for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
467             QCameraStream *pStream =
468                 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
469             if (pStream != NULL) {
470                 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
471                     main_stream = pStream;
472                     main_frame = recvd_frame->bufs[i];
473                     break;
474                 }
475             }
476         }
477         if (main_stream != NULL && main_frame != NULL) {
478             int32_t faceId = -1;
479             cam_pp_offline_src_config_t config;
480             memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
481             config.num_of_bufs = 1;
482             main_stream->getFormat(config.input_fmt);
483             main_stream->getFrameDimension(config.input_dim);
484             main_stream->getFrameOffset(config.input_buf_planes.plane_info);
485             LOGH("DEBUG: registerFaceImage E");
486             int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
487             LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
488             bRunFaceReg = 0;
489         }
490     }
491 
492 #endif
493 /* END of test register face image for face authentication */
494 
495     LOGH("[KPI Perf]: X");
496 }
497 #ifdef TARGET_TS_MAKEUP
498 bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,
499         QCameraStream * pStream) {
500     LOGD("begin");
501     bool bRet = false;
502     if (pStream == NULL || pFrame == NULL) {
503         bRet = false;
504         LOGH("pStream == NULL || pFrame == NULL");
505     } else {
506         bRet = TsMakeupProcess(pFrame, pStream, mFaceRect);
507     }
508     LOGD("end bRet = %d ",bRet);
509     return bRet;
510 }
511 
512 bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,
513         QCameraStream * pStream) {
514     LOGD("begin");
515     bool bRet = false;
516     if (pStream == NULL || pFrame == NULL) {
517         bRet = false;
518         LOGH("pStream == NULL || pFrame == NULL");
519     } else {
520         cam_frame_len_offset_t offset;
521         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
522         pStream->getFrameOffset(offset);
523 
524         cam_dimension_t dim;
525         pStream->getFrameDimension(dim);
526 
527         unsigned char *yBuf  = (unsigned char*)pFrame->buffer;
528         unsigned char *uvBuf = yBuf + offset.mp[0].len;
529         TSMakeupDataEx inMakeupData;
530         inMakeupData.frameWidth  = dim.width;
531         inMakeupData.frameHeight = dim.height;
532         inMakeupData.yBuf  = yBuf;
533         inMakeupData.uvBuf = uvBuf;
534         inMakeupData.yStride  = offset.mp[0].stride;
535         inMakeupData.uvStride = offset.mp[1].stride;
536         LOGD("detect begin");
537         TSHandle fd_handle = ts_detectface_create_context();
538         if (fd_handle != NULL) {
539             cam_format_t fmt;
540             pStream->getFormat(fmt);
541             int iret = ts_detectface_detectEx(fd_handle, &inMakeupData);
542             LOGD("ts_detectface_detect iret = %d",iret);
543             if (iret <= 0) {
544                 bRet = false;
545             } else {
546                 TSRect faceRect;
547                 memset(&faceRect,-1,sizeof(TSRect));
548                 iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL);
549                 LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld,"
550                         "faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld"
551                         ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom);
552                 bRet = TsMakeupProcess(pFrame,pStream,faceRect);
553             }
554             ts_detectface_destroy_context(&fd_handle);
555             fd_handle = NULL;
556         } else {
557             LOGH("fd_handle == NULL");
558         }
559         LOGD("detect end");
560     }
561     LOGD("end bRet = %d ",bRet);
562     return bRet;
563 }
564 
565 bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame,
566         QCameraStream * pStream,TSRect& faceRect) {
567     bool bRet = false;
568     LOGD("begin");
569     if (pStream == NULL || pFrame == NULL) {
570         LOGH("pStream == NULL || pFrame == NULL ");
571         return false;
572     }
573 
574     int whiteLevel, cleanLevel;
575     bool enableMakeup = (faceRect.left > -1) &&
576             (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel));
577     if (enableMakeup) {
578         cam_dimension_t dim;
579         cam_frame_len_offset_t offset;
580         pStream->getFrameDimension(dim);
581         pStream->getFrameOffset(offset);
582         unsigned char *tempOriBuf = NULL;
583 
584         tempOriBuf = (unsigned char*)pFrame->buffer;
585         unsigned char *yBuf = tempOriBuf;
586         unsigned char *uvBuf = tempOriBuf + offset.mp[0].len;
587         unsigned char *tmpBuf = new unsigned char[offset.frame_len];
588         if (tmpBuf == NULL) {
589             LOGH("tmpBuf == NULL ");
590             return false;
591         }
592         TSMakeupDataEx inMakeupData, outMakeupData;
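        // Clamp the makeup levels to the supported [0, 100] range.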
593         whiteLevel =  whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
594         cleanLevel =  cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
595         inMakeupData.frameWidth = dim.width;  // NV21 Frame width  > 0
596         inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
597         inMakeupData.yBuf =  yBuf; //  Y buffer pointer
598         inMakeupData.uvBuf = uvBuf; // VU buffer pointer
599         inMakeupData.yStride  = offset.mp[0].stride;
600         inMakeupData.uvStride = offset.mp[1].stride;
601         outMakeupData.frameWidth = dim.width; // NV21 Frame width  > 0
602         outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
603         outMakeupData.yBuf =  tmpBuf; //  Y buffer pointer
604         outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer
605         outMakeupData.yStride  = offset.mp[0].stride;
606         outMakeupData.uvStride = offset.mp[1].stride;
607         LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d",
608             faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel);
609         ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel);
610         memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len);
611         QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info;
612         memory->cleanCache(pFrame->buf_idx);
613         if (tmpBuf != NULL) {
614             delete[] tmpBuf;
615             tmpBuf = NULL;
616         }
617     }
618     LOGD("end bRet = %d ",bRet);
619     return bRet;
620 }
621 #endif
622 /*===========================================================================
623  * FUNCTION   : postproc_channel_cb_routine
624  *
625  * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
626  *              mm-camera-interface
627  *
628  * PARAMETERS :
629  *   @recvd_frame : received super buffer
630  *   @userdata    : user data ptr
631  *
632  * RETURN    : None
633  *
634  * NOTE      : recvd_frame will be released by the caller after this call, so if
635  *             any async operation on recvd_frame is needed, it's our responsibility
636  *             to save a copy of it for later use.
637 *==========================================================================*/
638 void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
639                                                             void *userdata)
640 {
641     ATRACE_CALL();
642     LOGH("[KPI Perf]: E");
643     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
644     if (pme == NULL ||
645         pme->mCameraHandle == NULL ||
646         pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
647         LOGE("camera obj not valid");
648         return;
649     }
650 
651     // save a copy for the superbuf
652     mm_camera_super_buf_t* frame =
653                (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
654     if (frame == NULL) {
655         LOGE("Error allocating memory to save received_frame structure.");
656         return;
657     }
658     *frame = *recvd_frame;
659 
660     if (recvd_frame->num_bufs > 0) {
661         LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx);
662     }
663     // Wait on JPEG create session
664     pme->waitDeferredWork(pme->mJpegJob);
665 
666     // send to postprocessor
667     pme->m_postprocessor.processPPData(frame);
668 
669     ATRACE_INT("Camera:Reprocess", 0);
670     LOGH("[KPI Perf]: X");
671 }
672 
673 /*===========================================================================
674  * FUNCTION   : synchronous_stream_cb_routine
675  *
676  * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS
677  *
678  * PARAMETERS :
679  *   @super_frame : received super buffer
680  *   @stream      : stream object
681  *   @userdata    : user data ptr
682  *
683  * RETURN    : None
684  *
685  * NOTE      : This function is executed in mm-interface context.
686  *             Avoid adding latency on this thread.
687  *==========================================================================*/
688 void QCamera2HardwareInterface::synchronous_stream_cb_routine(
689         mm_camera_super_buf_t *super_frame, QCameraStream * stream,
690         void *userdata)
691 {
692     nsecs_t frameTime = 0, mPreviewTimestamp = 0;
693     int err = NO_ERROR;
694 
695     ATRACE_CALL();
696     LOGH("[KPI Perf] : BEGIN");
697     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
698     QCameraGrallocMemory *memory = NULL;
699 
700     if (pme == NULL) {
701         LOGE("Invalid hardware object");
702         return;
703     }
704     if (super_frame == NULL) {
705         LOGE("Invalid super buffer");
706         return;
707     }
708     mm_camera_buf_def_t *frame = super_frame->bufs[0];
709     if (NULL == frame) {
710         LOGE("Frame is NULL");
711         return;
712     }
713 
714     if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) {
715         LOGE("This is only for PREVIEW stream for now");
716         return;
717     }
718 
719     if(pme->m_bPreviewStarted) {
720         LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
721         pme->m_bPreviewStarted = false;
722     }
723 
724     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
725         pthread_mutex_lock(&pme->mGrallocLock);
726         pme->mLastPreviewFrameID = frame->frame_idx;
727         pthread_mutex_unlock(&pme->mGrallocLock);
728         LOGH("preview is not running, no need to process");
729         return;
730     }
731 
732     frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
733     // Calculate the future presentation time stamp for displaying frames at regular interval
734     mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime);
735     stream->mStreamTimestamp = frameTime;
736     memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
737 
738 #ifdef TARGET_TS_MAKEUP
739     pme->TsMakeupProcess_Preview(frame,stream);
740 #endif
741 
742     // Enqueue  buffer to gralloc.
743     uint32_t idx = frame->buf_idx;
744     LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld",
745             pme, idx, frameTime, mPreviewTimestamp);
746     err = memory->enqueueBuffer(idx, mPreviewTimestamp);
747 
748     if (err == NO_ERROR) {
749         pthread_mutex_lock(&pme->mGrallocLock);
750         pme->mLastPreviewFrameID = frame->frame_idx;
751         pme->mEnqueuedBuffers++;
752         pthread_mutex_unlock(&pme->mGrallocLock);
753     } else {
754         LOGE("Enqueue Buffer failed");
755     }
756 
757     LOGH("[KPI Perf] : END");
758     return;
759 }
760 
761 /*===========================================================================
762  * FUNCTION   : preview_stream_cb_routine
763  *
764  * DESCRIPTION: helper function to handle preview frame from preview stream in
765  *              normal case with display.
766  *
767  * PARAMETERS :
768  *   @super_frame : received super buffer
769  *   @stream      : stream object
770  *   @userdata    : user data ptr
771  *
772  * RETURN    : None
773  *
774  * NOTE      : the caller passes ownership of super_frame, so it's our
775  *             responsibility to free super_frame once we're done with it. The
776  *             new preview frame will be sent to display, and an older frame
777  *             will be dequeued from display and returned to the kernel
778  *             for future use.
779  *==========================================================================*/
780 void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
781                                                           QCameraStream * stream,
782                                                           void *userdata)
783 {
784     KPI_ATRACE_CALL();
785     LOGH("[KPI Perf] : BEGIN");
786     int err = NO_ERROR;
787     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
788     QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
789     uint8_t dequeueCnt = 0;
790 
791     if (pme == NULL) {
792         LOGE("Invalid hardware object");
793         free(super_frame);
794         return;
795     }
796     if (memory == NULL) {
797         LOGE("Invalid memory object");
798         free(super_frame);
799         return;
800     }
801 
802     mm_camera_buf_def_t *frame = super_frame->bufs[0];
803     if (NULL == frame) {
804         LOGE("preview frame is NULL");
805         free(super_frame);
806         return;
807     }
808 
809     // For instant capture and for instant AEC, keep track of the frame counter.
810     // This count will be used to check against the corresponding bound values.
811     if (pme->mParameters.isInstantAECEnabled() ||
812             pme->mParameters.isInstantCaptureEnabled()) {
813         pme->mInstantAecFrameCount++;
814     }
815 
816     pthread_mutex_lock(&pme->mGrallocLock);
817     if (!stream->isSyncCBEnabled()) {
818         pme->mLastPreviewFrameID = frame->frame_idx;
819     }
820     if (((!stream->isSyncCBEnabled()) &&
821             (!pme->needProcessPreviewFrame(frame->frame_idx))) ||
822             ((stream->isSyncCBEnabled()) &&
823             (memory->isBufOwnedByCamera(frame->buf_idx)))) {
824         //If buffer owned by camera, then it is not enqueued to display.
825         // bufDone it back to backend.
826         pthread_mutex_unlock(&pme->mGrallocLock);
827         LOGH("preview is not running, no need to process");
828         stream->bufDone(frame->buf_idx);
829         free(super_frame);
830         return;
831     } else {
832         pthread_mutex_unlock(&pme->mGrallocLock);
833     }
834 
835     if (pme->needDebugFps()) {
836         pme->debugShowPreviewFPS();
837     }
838 
839     uint32_t idx = frame->buf_idx;
840 
841     pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
842 
843     if(pme->m_bPreviewStarted) {
844        LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
845        pme->m_bPreviewStarted = false ;
846     }
847 
848     if (!stream->isSyncCBEnabled()) {
849         LOGD("Enqueue Buffer to display %d", idx);
850 #ifdef TARGET_TS_MAKEUP
851         pme->TsMakeupProcess_Preview(frame,stream);
852 #endif
853         err = memory->enqueueBuffer(idx);
854 
855         if (err == NO_ERROR) {
856             pthread_mutex_lock(&pme->mGrallocLock);
857             pme->mEnqueuedBuffers++;
858             dequeueCnt = pme->mEnqueuedBuffers;
859             pthread_mutex_unlock(&pme->mGrallocLock);
860         } else {
861             LOGE("Enqueue Buffer failed");
862         }
863     } else {
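        // Sync CB enabled: the buffer was already enqueued to display in
        // synchronous_stream_cb_routine(), so only pick up the pending dequeue count here.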
864         pthread_mutex_lock(&pme->mGrallocLock);
865         dequeueCnt = pme->mEnqueuedBuffers;
866         pthread_mutex_unlock(&pme->mGrallocLock);
867     }
868 
869     // Display the buffer.
870     LOGD("%p displayBuffer %d E", pme, idx);
871     uint8_t numMapped = memory->getMappable();
872 
873     for (uint8_t i = 0; i < dequeueCnt; i++) {
874         int dequeuedIdx = memory->dequeueBuffer();
875         if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
876             LOGE("Invalid dequeued buffer index %d from display",
877                    dequeuedIdx);
878             break;
879         } else {
880             pthread_mutex_lock(&pme->mGrallocLock);
881             pme->mEnqueuedBuffers--;
882             pthread_mutex_unlock(&pme->mGrallocLock);
883             if (dequeuedIdx >= numMapped) {
884                 // This buffer has not yet been mapped to the backend
885                 err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
886                 if (memory->checkIfAllBuffersMapped()) {
887                     // check if mapping is done for all the buffers
888                     // Signal the condition for create jpeg session
889                     Mutex::Autolock l(pme->mMapLock);
890                     pme->mMapCond.signal();
891                     LOGH("Mapping done for all bufs");
892                 } else {
893                     LOGH("All buffers are not yet mapped");
894                 }
895             }
896         }
897 
898         if (err < 0) {
899             LOGE("buffer mapping failed %d", err);
900         } else {
901             // Return dequeued buffer back to driver
902             err = stream->bufDone((uint32_t)dequeuedIdx);
903             if ( err < 0) {
904                 LOGW("stream bufDone failed %d", err);
905             }
906         }
907     }
908 
909     // Handle preview data callback
910     if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
911         if (pme->needSendPreviewCallback() &&
912                 (!pme->mParameters.isSceneSelectionEnabled())) {
913             int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
914             if (NO_ERROR != rc) {
915                 LOGW("Preview callback was not sent successfully");
916             }
917         }
918     }
919 
920     free(super_frame);
921     LOGH("[KPI Perf] : END");
922     return;
923 }
924 
925 /*===========================================================================
926  * FUNCTION   : sendPreviewCallback
927  *
928  * DESCRIPTION: helper function for triggering preview callbacks
929  *
930  * PARAMETERS :
931  *   @stream    : stream object
932  *   @memory    : Stream memory allocator
933  *   @idx       : buffer index
934  *
935  * RETURN     : int32_t type of status
936  *              NO_ERROR  -- success
937  *              non-zero failure code
938  *==========================================================================*/
939 int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
940         QCameraMemory *memory, uint32_t idx)
941 {
942     camera_memory_t *previewMem = NULL;
943     camera_memory_t *data = NULL;
944     camera_memory_t *dataToApp = NULL;
945     size_t previewBufSize = 0;
946     size_t previewBufSizeFromCallback = 0;
947     cam_dimension_t preview_dim;
948     cam_format_t previewFmt;
949     int32_t rc = NO_ERROR;
950     int32_t yStride = 0;
951     int32_t yScanline = 0;
952     int32_t uvStride = 0;
953     int32_t uvScanline = 0;
954     int32_t uStride = 0;
955     int32_t uScanline = 0;
956     int32_t vStride = 0;
957     int32_t vScanline = 0;
958     int32_t yStrideToApp = 0;
959     int32_t uvStrideToApp = 0;
960     int32_t yScanlineToApp = 0;
961     int32_t uvScanlineToApp = 0;
962     int32_t srcOffset = 0;
963     int32_t dstOffset = 0;
964     int32_t srcBaseOffset = 0;
965     int32_t dstBaseOffset = 0;
966     int i;
967 
968     if ((NULL == stream) || (NULL == memory)) {
969         LOGE("Invalid preview callback input");
970         return BAD_VALUE;
971     }
972 
973     cam_stream_info_t *streamInfo =
974             reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
975     if (NULL == streamInfo) {
976         LOGE("Invalid streamInfo");
977         return BAD_VALUE;
978     }
979 
980     stream->getFrameDimension(preview_dim);
981     stream->getFormat(previewFmt);
982 
983     yStrideToApp = preview_dim.width;
984     yScanlineToApp = preview_dim.height;
985     uvStrideToApp = yStrideToApp;
986     uvScanlineToApp = yScanlineToApp / 2;
987 
988     /* The preview buffer size in the callback should be
989      * (width * height * bytes_per_pixel). As all the preview formats we
990      * support use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
991      * A check needs to be added if other formats are supported in the future. */
992     if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
993         (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
994         (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
995         (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
996         (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
997         (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
998         if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
999             yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1000             yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1001             uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1002             uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1003             vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
1004             vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
1005 
1006             previewBufSize = (size_t)
1007                     (yStride * yScanline + uStride * uScanline + vStride * vScanline);
1008             previewBufSizeFromCallback = previewBufSize;
1009         } else {
1010             yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1011             yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1012             uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1013             uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1014 
1015             previewBufSize = (size_t)
1016                     ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
1017 
1018             previewBufSizeFromCallback = (size_t)
1019                     ((yStride * yScanline) + (uvStride * uvScanline));
1020         }
1021         if(previewBufSize == previewBufSizeFromCallback) {
1022             previewMem = mGetMemory(memory->getFd(idx),
1023                        previewBufSize, 1, mCallbackCookie);
1024             if (!previewMem || !previewMem->data) {
1025                 LOGE("mGetMemory failed.\n");
1026                 return NO_MEMORY;
1027             } else {
1028                 data = previewMem;
1029             }
1030         } else {
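            // Stride/scanline padding of the stream buffer differs from the tightly
            // packed layout the app expects, so copy the Y and UV planes row by row
            // into a separate buffer allocated for the callback.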
1031             data = memory->getMemory(idx, false);
1032             dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1033             if (!dataToApp || !dataToApp->data) {
1034                 LOGE("mGetMemory failed.\n");
1035                 return NO_MEMORY;
1036             }
1037 
1038             for (i = 0; i < preview_dim.height; i++) {
1039                 srcOffset = i * yStride;
1040                 dstOffset = i * yStrideToApp;
1041 
1042                 memcpy((unsigned char *) dataToApp->data + dstOffset,
1043                         (unsigned char *) data->data + srcOffset,
1044                         (size_t)yStrideToApp);
1045             }
1046 
1047             srcBaseOffset = yStride * yScanline;
1048             dstBaseOffset = yStrideToApp * yScanlineToApp;
1049 
1050             for (i = 0; i < preview_dim.height/2; i++) {
1051                 srcOffset = i * uvStride + srcBaseOffset;
1052                 dstOffset = i * uvStrideToApp + dstBaseOffset;
1053 
1054                 memcpy((unsigned char *) dataToApp->data + dstOffset,
1055                         (unsigned char *) data->data + srcOffset,
1056                         (size_t)yStrideToApp);
1057             }
1058         }
1059     } else {
1060         /* Invalid buffer content, but it can still be used as a first preview
1061            frame trigger in the framework/app */
1062         previewBufSize = (size_t)
1063                     ((yStrideToApp * yScanlineToApp) +
1064                     (uvStrideToApp * uvScanlineToApp));
1065         previewBufSizeFromCallback = 0;
1066         LOGW("Invalid preview format. Buffer content cannot be processed size = %d",
1067                 previewBufSize);
1068         dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1069         if (!dataToApp || !dataToApp->data) {
1070             LOGE("mGetMemory failed.\n");
1071             return NO_MEMORY;
1072         }
1073     }
1074     qcamera_callback_argm_t cbArg;
1075     memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1076     cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1077     cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1078     if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
1079             previewBufSize == previewBufSizeFromCallback) {
1080         cbArg.data = data;
1081     } else {
1082         cbArg.data = dataToApp;
1083     }
1084     if ( previewMem ) {
1085         cbArg.user_data = previewMem;
1086         cbArg.release_cb = releaseCameraMemory;
1087     } else if (dataToApp) {
1088         cbArg.user_data = dataToApp;
1089         cbArg.release_cb = releaseCameraMemory;
1090     }
1091     cbArg.cookie = this;
1092     rc = m_cbNotifier.notifyCallback(cbArg);
1093     if (rc != NO_ERROR) {
1094         LOGW("fail sending notification");
1095         if (previewMem) {
1096             previewMem->release(previewMem);
1097         } else if (dataToApp) {
1098             dataToApp->release(dataToApp);
1099         }
1100     }
1101 
1102     return rc;
1103 }
1104 
1105 /*===========================================================================
1106  * FUNCTION   : nodisplay_preview_stream_cb_routine
1107  *
1108  * DESCRIPTION: helper function to handle preview frame from preview stream in
1109  *              no-display case
1110  *
1111  * PARAMETERS :
1112  *   @super_frame : received super buffer
1113  *   @stream      : stream object
1114  *   @userdata    : user data ptr
1115  *
1116  * RETURN    : None
1117  *
1118  * NOTE      : the caller passes ownership of super_frame, so it's our
1119  *             responsibility to free super_frame once we're done with it.
1120  *==========================================================================*/
1121 void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
1122                                                           mm_camera_super_buf_t *super_frame,
1123                                                           QCameraStream *stream,
1124                                                           void * userdata)
1125 {
1126     ATRACE_CALL();
1127     LOGH("[KPI Perf] E");
1128     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1129     if (pme == NULL ||
1130         pme->mCameraHandle == NULL ||
1131         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1132         LOGE("camera obj not valid");
1133         // simply free super frame
1134         free(super_frame);
1135         return;
1136     }
1137     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1138     if (NULL == frame) {
1139         LOGE("preview frame is NULL");
1140         free(super_frame);
1141         return;
1142     }
1143 
1144     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1145         LOGH("preview is not running, no need to process");
1146         stream->bufDone(frame->buf_idx);
1147         free(super_frame);
1148         return;
1149     }
1150 
1151     if (pme->needDebugFps()) {
1152         pme->debugShowPreviewFPS();
1153     }
1154 
1155     QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1156     camera_memory_t *preview_mem = NULL;
1157     if (previewMemObj != NULL) {
1158         preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1159     }
1160     if (NULL != previewMemObj && NULL != preview_mem) {
1161         pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
1162 
1163         if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
1164                 pme->needSendPreviewCallback() &&
1165                 (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
1166             qcamera_callback_argm_t cbArg;
1167             memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1168             cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1169             cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1170             cbArg.data = preview_mem;
1171             cbArg.user_data = (void *) &frame->buf_idx;
1172             cbArg.cookie = stream;
1173             cbArg.release_cb = returnStreamBuffer;
1174             int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1175             if (rc != NO_ERROR) {
1176                 LOGE ("fail sending data notify");
1177                 stream->bufDone(frame->buf_idx);
1178             }
1179         } else {
1180             stream->bufDone(frame->buf_idx);
1181         }
1182     }
1183     free(super_frame);
1184     LOGH("[KPI Perf] X");
1185 }
1186 
1187 /*===========================================================================
1188  * FUNCTION   : rdi_mode_stream_cb_routine
1189  *
1190  * DESCRIPTION: helper function to handle RDI frame from the preview stream in
1191  *              RDI mode
1192  *
1193  * PARAMETERS :
1194  *   @super_frame : received super buffer
1195  *   @stream      : stream object
1196  *   @userdata    : user data ptr
1197  *
1198  * RETURN    : None
1199  *
1200  * NOTE      : the caller passes ownership of super_frame, so it's our
1201  *             responsibility to free super_frame once we're done with it.
1202  *==========================================================================*/
1203 void QCamera2HardwareInterface::rdi_mode_stream_cb_routine(
1204   mm_camera_super_buf_t *super_frame,
1205   QCameraStream *stream,
1206   void * userdata)
1207 {
1208     ATRACE_CALL();
1209     LOGH("RDI_DEBUG Enter");
1210     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1211     if (pme == NULL ||
1212         pme->mCameraHandle == NULL ||
1213         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1214         LOGE("camera obj not valid");
1215         free(super_frame);
1216         return;
1217     }
1218     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1219     if (NULL == frame) {
1220         LOGE("preview frame is NULL");
1221         goto end;
1222     }
1223     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1224         LOGE("preview is not running, no need to process");
1225         stream->bufDone(frame->buf_idx);
1226         goto end;
1227     }
1228     if (pme->needDebugFps()) {
1229         pme->debugShowPreviewFPS();
1230     }
1231     // Non-secure Mode
1232     if (!pme->isSecureMode()) {
1233         QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1234         if (NULL == previewMemObj) {
1235             LOGE("previewMemObj is NULL");
1236             stream->bufDone(frame->buf_idx);
1237             goto end;
1238         }
1239 
1240         camera_memory_t *preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1241         if (NULL != preview_mem) {
1242             previewMemObj->cleanCache(frame->buf_idx);
1243             // Dump RAW frame
1244             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
1245             // Notify Preview callback frame
1246             if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1247                     pme->mDataCb != NULL &&
1248                     pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1249                 qcamera_callback_argm_t cbArg;
1250                 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1251                 cbArg.cb_type    = QCAMERA_DATA_CALLBACK;
1252                 cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
1253                 cbArg.data       = preview_mem;
1254                 cbArg.user_data = (void *) &frame->buf_idx;
1255                 cbArg.cookie     = stream;
1256                 cbArg.release_cb = returnStreamBuffer;
1257                 pme->m_cbNotifier.notifyCallback(cbArg);
1258             } else {
1259                 LOGH("No need to process preview frame, return buffer");
1260                 stream->bufDone(frame->buf_idx);
1261             }
1262         }
1263         else {
1264             LOGE("preview_mem is NULL");
1265             stream->bufDone(frame->buf_idx);
1266         }
1267     } else {
1268         // Secure Mode
1269         // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode
1270         QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1271         if (NULL == previewMemObj) {
1272             LOGE("previewMemObj is NULL");
1273             stream->bufDone(frame->buf_idx);
1274             goto end;
1275         }
1276 
1277         int fd = previewMemObj->getFd(frame->buf_idx);
1278         LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx);
1279         if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1280                 pme->mDataCb != NULL &&
1281                 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1282             // Prepare Callback structure
1283             qcamera_callback_argm_t cbArg;
1284             memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1285             cbArg.cb_type    = QCAMERA_NOTIFY_CALLBACK;
1286             cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
1287 #ifndef VANILLA_HAL
1288             cbArg.ext1       = CAMERA_FRAME_DATA_FD;
1289             cbArg.ext2       = fd;
1290 #endif
1291             cbArg.user_data  = (void *) &frame->buf_idx;
1292             cbArg.cookie     = stream;
1293             cbArg.release_cb = returnStreamBuffer;
1294             pme->m_cbNotifier.notifyCallback(cbArg);
1295         } else {
1296             LOGH("No need to process preview frame, return buffer");
1297             stream->bufDone(frame->buf_idx);
1298         }
1299     }
1300 end:
1301     free(super_frame);
1302     LOGH("RDI_DEBUG Exit");
1303     return;
1304 }
1305 
1306 /*===========================================================================
1307  * FUNCTION   : postview_stream_cb_routine
1308  *
1309  * DESCRIPTION: helper function to handle postview frame from postview stream
1310  *
1311  * PARAMETERS :
1312  *   @super_frame : received super buffer
1313  *   @stream      : stream object
1314  *   @userdata    : user data ptr
1315  *
1316  * RETURN    : None
1317  *
1318  * NOTE      : the caller passes ownership of super_frame, so it's our
1319  *             responsibility to free super_frame once we're done with it.
1320  *==========================================================================*/
1321 void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1322                                                            QCameraStream *stream,
1323                                                            void *userdata)
1324 {
1325     ATRACE_CALL();
    int err = NO_ERROR;
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;

    if (pme == NULL) {
        LOGE("Invalid hardware object");
        free(super_frame);
        return;
    }

    LOGH("[KPI Perf] : BEGIN");

    mm_camera_buf_def_t *frame = super_frame->bufs[0];
    if (NULL == frame) {
        LOGE("postview frame is NULL");
        free(super_frame);
        return;
    }

    // Only touch frame->mem_info after the frame pointer itself has been
    // validated, to avoid dereferencing a NULL buffer entry.
    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)frame->mem_info;
    if (memory == NULL) {
        LOGE("Invalid memory object");
        free(super_frame);
        return;
    }

    QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
    if (NULL != memObj) {
        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
    }
1354 
1355     // Return buffer back to driver
1356     err = stream->bufDone(frame->buf_idx);
1357     if ( err < 0) {
1358         LOGE("stream bufDone failed %d", err);
1359     }
1360 
1361     free(super_frame);
1362     LOGH("[KPI Perf] : END");
1363     return;
1364 }
1365 
1366 /*===========================================================================
1367  * FUNCTION   : video_stream_cb_routine
1368  *
1369  * DESCRIPTION: helper function to handle video frame from video stream
1370  *
1371  * PARAMETERS :
1372  *   @super_frame : received super buffer
1373  *   @stream      : stream object
1374  *   @userdata    : user data ptr
1375  *
1376  * RETURN    : None
1377  *
1378  * NOTE      : caller passes the ownership of super_frame, it's our
 *             responsibility to free super_frame once it's done. The video
 *             frame will be sent to the video encoder. Once the encoder is
 *             done with the frame, it calls another API
 *             (release_recording_frame) to return the frame back.
1383  *==========================================================================*/
void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1385                                                         QCameraStream *stream,
1386                                                         void *userdata)
1387 {
1388     ATRACE_CALL();
1389     QCameraMemory *videoMemObj = NULL;
1390     camera_memory_t *video_mem = NULL;
1391     nsecs_t timeStamp = 0;
1392     bool triggerTCB = FALSE;
1393 
1394     LOGH("[KPI Perf] : BEGIN");
1395     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1396     if (pme == NULL ||
1397         pme->mCameraHandle == NULL ||
1398         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1399         LOGE("camera obj not valid");
1400         // simply free super frame
1401         free(super_frame);
1402         return;
1403     }
1404 
1405     mm_camera_buf_def_t *frame = super_frame->bufs[0];
1406 
1407     if (pme->needDebugFps()) {
1408         pme->debugShowVideoFPS();
1409     }
1410     if(pme->m_bRecordStarted) {
1411        LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
       pme->m_bRecordStarted = false;
1413     }
1414     LOGD("Stream(%d), Timestamp: %ld %ld",
1415           frame->stream_id,
1416           frame->ts.tv_sec,
1417           frame->ts.tv_nsec);
1418 
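    // MPLANE buffers are sent to the encoder either one frame per callback
    // (video batch size 0) or packed into a shared batch metadata handle;
    // other buffer types arrive as a pre-filled user_buf batch container.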
1419     if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
1420         if (pme->mParameters.getVideoBatchSize() == 0) {
1421             timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1422                     + frame->ts.tv_nsec;
1423             LOGD("Video frame to encoder TimeStamp : %lld batch = 0",
1424                     timeStamp);
1425             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1426             videoMemObj = (QCameraMemory *)frame->mem_info;
1427             video_mem = NULL;
1428             if (NULL != videoMemObj) {
1429                 video_mem = videoMemObj->getMemory(frame->buf_idx,
1430                         (pme->mStoreMetaDataInFrame > 0)? true : false);
1431                 triggerTCB = TRUE;
1432             }
1433         } else {
1434             //Handle video batch callback
1435             native_handle_t *nh = NULL;
1436             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1437             QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1438             if ((stream->mCurMetaMemory == NULL)
1439                     || (stream->mCurBufIndex == -1)) {
1440                 //get Free metadata available
1441                 for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
1442                     if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
1443                         stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
1444                         stream->mCurBufIndex = 0;
1445                         stream->mCurMetaIndex = i;
1446                         stream->mStreamMetaMemory[i].numBuffers = 0;
1447                         break;
1448                     }
1449                 }
1450             }
1451             video_mem = stream->mCurMetaMemory;
1452             if (video_mem == NULL) {
1453                 LOGE("No Free metadata. Drop this frame");
1454                 stream->mCurBufIndex = -1;
1455                 stream->bufDone(frame->buf_idx);
1456                 free(super_frame);
1457                 return;
1458             }
1459 
1460             struct encoder_media_buffer_type * packet =
1461                     (struct encoder_media_buffer_type *)video_mem->data;
1462             nh = const_cast<native_handle_t *>(packet->meta_handle);
1463             int index = stream->mCurBufIndex;
1464             int fd_cnt = pme->mParameters.getVideoBatchSize();
1465             nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1466                     + frame->ts.tv_nsec;
1467             if (index == 0) {
1468                 stream->mFirstTimeStamp = frame_ts;
1469             }
1470 
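            // Only the first frame's absolute timestamp is kept; the remaining
            // frames in the batch store deltas relative to it (see the
            // TIMESTAMP slots below).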
1471             stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
1472                     = (uint8_t)frame->buf_idx;
1473             stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
1474             stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
1475                     = TRUE;
            /*
             * Batch native handle layout (stride = fd_cnt = video batch size):
             *   data[0 .. fd_cnt-1]           => FDs
             *   data[fd_cnt .. 2*fd_cnt-1]    => OFFSETs
             *   data[2*fd_cnt .. 3*fd_cnt-1]  => SIZEs
             *   data[3*fd_cnt .. 4*fd_cnt-1]  => Usage flags (color format/compression)
             *   data[4*fd_cnt .. 5*fd_cnt-1]  => TIMESTAMPs (delta from the first frame)
             *   data[5*fd_cnt .. 6*fd_cnt-1]  => FORMATs
             * e.g. with fd_cnt = 4, frame 2's FD goes in data[2] and its size in data[10].
             */
1484             nh->data[index] = videoMemObj->getFd(frame->buf_idx);
1485             nh->data[index + fd_cnt] = 0;
1486             nh->data[index + (fd_cnt * 2)] = (int)videoMemObj->getSize(frame->buf_idx);
1487             nh->data[index + (fd_cnt * 3)] = videoMemObj->getUsage();
1488             nh->data[index + (fd_cnt * 4)] = (int)(frame_ts - stream->mFirstTimeStamp);
1489             nh->data[index + (fd_cnt * 5)] = videoMemObj->getFormat();
1490             stream->mCurBufIndex++;
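            // Once the batch is full, hand the complete metadata handle to the
            // encoder below and reset the batch tracking state.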
1491             if (stream->mCurBufIndex == fd_cnt) {
1492                 timeStamp = stream->mFirstTimeStamp;
1493                 LOGD("Video frame to encoder TimeStamp : %lld batch = %d",
1494                     timeStamp, fd_cnt);
1495                 stream->mCurBufIndex = -1;
1496                 stream->mCurMetaIndex = -1;
1497                 stream->mCurMetaMemory = NULL;
1498                 triggerTCB = TRUE;
1499             }
1500         }
1501     } else {
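        // Non-MPLANE case: the super buffer is a user_buf container that
        // already groups bufs_used plane buffers for this batch.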
1502         videoMemObj = (QCameraMemory *)frame->mem_info;
1503         video_mem = NULL;
1504         native_handle_t *nh = NULL;
1505         int fd_cnt = frame->user_buf.bufs_used;
1506         if (NULL != videoMemObj) {
1507             video_mem = videoMemObj->getMemory(frame->buf_idx, true);
1508             if (video_mem != NULL) {
1509                 struct encoder_media_buffer_type * packet =
1510                         (struct encoder_media_buffer_type *)video_mem->data;
1511                 nh = const_cast<native_handle_t *>(packet->meta_handle);
1512             } else {
1513                 LOGE("video_mem NULL");
1514             }
1515         } else {
1516             LOGE("videoMemObj NULL");
1517         }
1518 
1519         if (nh != NULL) {
1520             timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1521                     + frame->ts.tv_nsec;
1522             LOGD("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
1523                     timeStamp, frame->fd, frame->buf_idx, fd_cnt);
1524 
1525             for (int i = 0; i < fd_cnt; i++) {
1526                 if (frame->user_buf.buf_idx[i] >= 0) {
1527                     mm_camera_buf_def_t *plane_frame =
1528                             &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
1529                     QCameraVideoMemory *frameobj =
1530                             (QCameraVideoMemory *)plane_frame->mem_info;
1531                     int usage = frameobj->getUsage();
1532                     nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
1533                             + plane_frame->ts.tv_nsec;
                    /*
                     * Same layout as the batch handle above (stride = fd_cnt = bufs_used):
                     *   data[0 .. fd_cnt-1]           => FDs
                     *   data[fd_cnt .. 2*fd_cnt-1]    => OFFSETs
                     *   data[2*fd_cnt .. 3*fd_cnt-1]  => SIZEs
                     *   data[3*fd_cnt .. 4*fd_cnt-1]  => Usage flags (color format/compression)
                     *   data[4*fd_cnt .. 5*fd_cnt-1]  => TIMESTAMPs (delta from the batch timestamp)
                     *   data[5*fd_cnt .. 6*fd_cnt-1]  => FORMATs
                     */
1542                     nh->data[i] = frameobj->getFd(plane_frame->buf_idx);
1543                     nh->data[fd_cnt + i] = 0;
1544                     nh->data[(2 * fd_cnt) + i] = (int)frameobj->getSize(plane_frame->buf_idx);
1545                     nh->data[(3 * fd_cnt) + i] = usage;
1546                     nh->data[(4 * fd_cnt) + i] = (int)(frame_ts - timeStamp);
1547                     nh->data[(5 * fd_cnt) + i] = frameobj->getFormat();
1548                     LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
1549                             (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
1550                     pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
1551                 }
1552             }
1553             triggerTCB = TRUE;
1554         } else {
1555             LOGE("No Video Meta Available. Return Buffer");
1556             stream->bufDone(super_frame->bufs[0]->buf_idx);
1557         }
1558     }
1559 
1560     if ((NULL != video_mem) && (triggerTCB == TRUE)) {
1561         if ((pme->mDataCbTimestamp != NULL) &&
1562             pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
1563             qcamera_callback_argm_t cbArg;
1564             memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1565             cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
1566             cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
1567             cbArg.data = video_mem;
1568             cbArg.timestamp = timeStamp;
1569             int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1570             if (rc != NO_ERROR) {
1571                 LOGE("fail sending data notify");
1572                 stream->bufDone(frame->buf_idx);
1573             }
1574         }
1575     }
1576 
1577     free(super_frame);
1578     LOGH("[KPI Perf] : END");
1579 }
1580 
1581 /*===========================================================================
1582  * FUNCTION   : snapshot_channel_cb_routine
1583  *
1584  * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
1585  *
1586  * PARAMETERS :
1587  *   @super_frame : received super buffer
1588  *   @userdata    : user data ptr
1589  *
1590  * RETURN    : None
1591  *
 * NOTE      : super_frame will be released after this call by the caller, so
 *             if an async operation is needed on super_frame, it's our
 *             responsibility to save a copy of it to be used later.
1595  *==========================================================================*/
void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1597        void *userdata)
1598 {
1599     ATRACE_CALL();
1600     char value[PROPERTY_VALUE_MAX];
1601     QCameraChannel *pChannel = NULL;
1602 
1603     LOGH("[KPI Perf]: E");
1604     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1605     if (pme == NULL ||
1606         pme->mCameraHandle == NULL ||
1607         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1608         LOGE("camera obj not valid");
1609         // simply free super frame
1610         free(super_frame);
1611         return;
1612     }
1613 
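    // In low power mode the snapshot is captured on the video channel, so
    // pick the channel that actually owns this superbuf.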
1614     if (pme->isLowPowerMode()) {
1615         pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
1616     } else {
1617         pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
1618     }
1619 
1620     if ((pChannel == NULL) || (pChannel->getMyHandle() != super_frame->ch_id)) {
1621         LOGE("Snapshot channel doesn't exist, return here");
1622         return;
1623     }
1624 
1625     property_get("persist.camera.dumpmetadata", value, "0");
1626     int32_t enabled = atoi(value);
1627     if (enabled) {
1628         if (pChannel == NULL ||
1629             pChannel->getMyHandle() != super_frame->ch_id) {
1630             LOGE("Capture channel doesn't exist, return here");
1631             return;
1632         }
1633         mm_camera_buf_def_t *pMetaFrame = NULL;
1634         QCameraStream *pStream = NULL;
1635         for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1636             pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1637             if (pStream != NULL) {
1638                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1639                     pMetaFrame = super_frame->bufs[i]; //find the metadata
1640                     if (pMetaFrame != NULL &&
1641                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1642                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
1643                     }
1644                     break;
1645                 }
1646             }
1647         }
1648     }
1649 
1650     // save a copy for the superbuf
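    // The caller releases super_frame as soon as this callback returns (see
    // NOTE above), so copy the superbuf descriptor before handing it to the
    // postprocessor asynchronously.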
1651     mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1652     if (frame == NULL) {
1653         LOGE("Error allocating memory to save received_frame structure.");
1654         pChannel->bufDone(super_frame);
1655         return;
1656     }
1657     *frame = *super_frame;
1658 
1659     if (frame->num_bufs > 0) {
1660         LOGI("[KPI Perf]: superbuf frame_idx %d",
1661                 frame->bufs[0]->frame_idx);
1662     }
1663 
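    // Wait for any deferred postprocessor initialization to finish before
    // sending the frame for further processing.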
1664     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1665             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1666         LOGE("Failed to trigger process data");
1667         pChannel->bufDone(super_frame);
1668         free(frame);
1669         frame = NULL;
1670         return;
1671     }
1672 
1673     LOGH("[KPI Perf]: X");
1674 }
1675 
1676 /*===========================================================================
1677  * FUNCTION   : raw_stream_cb_routine
1678  *
1679  * DESCRIPTION: helper function to handle raw dump frame from raw stream
1680  *
1681  * PARAMETERS :
1682  *   @super_frame : received super buffer
1683  *   @stream      : stream object
1684  *   @userdata    : user data ptr
1685  *
1686  * RETURN    : None
1687  *
1688  * NOTE      : caller passes the ownership of super_frame, it's our
1689  *             responsibility to free super_frame once it's done. For raw
1690  *             frame, there is no need to send to postprocessor for jpeg
1691  *             encoding. this function will play shutter and send the data
1692  *             callback to upper layer. Raw frame buffer will be returned
1693  *             back to kernel, and frame will be free after use.
1694  *==========================================================================*/
void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1696                                                       QCameraStream * /*stream*/,
1697                                                       void * userdata)
1698 {
1699     ATRACE_CALL();
1700     LOGH("[KPI Perf] : BEGIN");
1701     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1702     if (pme == NULL ||
1703         pme->mCameraHandle == NULL ||
1704         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1705         LOGE("camera obj not valid");
1706         // simply free super frame
1707         free(super_frame);
1708         return;
1709     }
1710 
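    // Raw dump frames are not sent for JPEG encoding; processRawData() plays
    // the shutter and delivers the buffer to the upper layer via the data
    // callback (see NOTE above).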
1711     pme->m_postprocessor.processRawData(super_frame);
1712     LOGH("[KPI Perf] : END");
1713 }
1714 
1715 /*===========================================================================
1716  * FUNCTION   : raw_channel_cb_routine
1717  *
 * DESCRIPTION: helper function to handle RAW superbuf callback directly from
1719  *              mm-camera-interface
1720  *
1721  * PARAMETERS :
1722  *   @super_frame : received super buffer
1723  *   @userdata    : user data ptr
1724  *
1725  * RETURN    : None
1726  *
 * NOTE      : super_frame will be released after this call by the caller, so
 *             if an async operation is needed on super_frame, it's our
 *             responsibility to save a copy of it to be used later.
 *==========================================================================*/
void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
        void *userdata)
{
1735     ATRACE_CALL();
1736     char value[PROPERTY_VALUE_MAX];
1737 
1738     LOGH("[KPI Perf]: E");
1739     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1740     if (pme == NULL ||
1741         pme->mCameraHandle == NULL ||
1742         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1743         LOGE("camera obj not valid");
1744         // simply free super frame
1745         free(super_frame);
1746         return;
1747     }
1748 
1749     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
1750     if (pChannel == NULL) {
1751         LOGE("RAW channel doesn't exist, return here");
1752         return;
1753     }
1754 
1755     if (pChannel->getMyHandle() != super_frame->ch_id) {
1756         LOGE("Invalid Input super buffer");
1757         pChannel->bufDone(super_frame);
1758         return;
1759     }
1760 
1761     property_get("persist.camera.dumpmetadata", value, "0");
1762     int32_t enabled = atoi(value);
1763     if (enabled) {
1764         mm_camera_buf_def_t *pMetaFrame = NULL;
1765         QCameraStream *pStream = NULL;
1766         for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1767             pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1768             if (pStream != NULL) {
1769                 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1770                     pMetaFrame = super_frame->bufs[i]; //find the metadata
1771                     if (pMetaFrame != NULL &&
1772                             ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1773                         pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
1774                     }
1775                     break;
1776                 }
1777             }
1778         }
1779     }
1780 
1781     // save a copy for the superbuf
1782     mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1783     if (frame == NULL) {
1784         LOGE("Error allocating memory to save received_frame structure.");
1785         pChannel->bufDone(super_frame);
1786         return;
1787     }
1788     *frame = *super_frame;
1789 
1790     if (frame->num_bufs > 0) {
1791         LOGI("[KPI Perf]: superbuf frame_idx %d",
1792                 frame->bufs[0]->frame_idx);
1793     }
1794 
1795     // Wait on Postproc initialization if needed
1796     // then send to postprocessor
1797     if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1798             (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1799         LOGE("Failed to trigger process data");
1800         pChannel->bufDone(super_frame);
1801         free(frame);
1802         frame = NULL;
1803         return;
1804     }
1805 
1806     LOGH("[KPI Perf]: X");
1807 
1808 }
1809 
1810 /*===========================================================================
1811  * FUNCTION   : preview_raw_stream_cb_routine
1812  *
1813  * DESCRIPTION: helper function to handle raw frame during standard preview
1814  *
1815  * PARAMETERS :
1816  *   @super_frame : received super buffer
1817  *   @stream      : stream object
1818  *   @userdata    : user data ptr
1819  *
1820  * RETURN    : None
1821  *
1822  * NOTE      : caller passes the ownership of super_frame, it's our
1823  *             responsibility to free super_frame once it's done.
1824  *==========================================================================*/
void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1826                                                               QCameraStream * stream,
1827                                                               void * userdata)
1828 {
1829     ATRACE_CALL();
1830     LOGH("[KPI Perf] : BEGIN");
1831     char value[PROPERTY_VALUE_MAX];
1832     bool dump_preview_raw = false, dump_video_raw = false;
1833 
1834     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1835     if (pme == NULL ||
1836         pme->mCameraHandle == NULL ||
1837         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1838         LOGE("camera obj not valid");
1839         // simply free super frame
1840         free(super_frame);
1841         return;
1842     }
1843 
1844     mm_camera_buf_def_t *raw_frame = super_frame->bufs[0];
1845 
1846     if (raw_frame != NULL) {
1847         property_get("persist.camera.preview_raw", value, "0");
1848         dump_preview_raw = atoi(value) > 0 ? true : false;
1849         property_get("persist.camera.video_raw", value, "0");
1850         dump_video_raw = atoi(value) > 0 ? true : false;
1851         if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
1852                 && dump_video_raw)) {
1853             pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1854         }
1855         stream->bufDone(raw_frame->buf_idx);
1856     }
1857     free(super_frame);
1858 
1859     LOGH("[KPI Perf] : END");
1860 }
1861 
1862 /*===========================================================================
1863  * FUNCTION   : snapshot_raw_stream_cb_routine
1864  *
1865  * DESCRIPTION: helper function to handle raw frame during standard capture
1866  *
1867  * PARAMETERS :
1868  *   @super_frame : received super buffer
1869  *   @stream      : stream object
1870  *   @userdata    : user data ptr
1871  *
1872  * RETURN    : None
1873  *
1874  * NOTE      : caller passes the ownership of super_frame, it's our
1875  *             responsibility to free super_frame once it's done.
1876  *==========================================================================*/
void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1878                                                                QCameraStream * stream,
1879                                                                void * userdata)
1880 {
1881     ATRACE_CALL();
1882     LOGH("[KPI Perf] : BEGIN");
1883     char value[PROPERTY_VALUE_MAX];
1884     bool dump_raw = false;
1885 
1886     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1887     if (pme == NULL ||
1888         pme->mCameraHandle == NULL ||
1889         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1890         LOGE("camera obj not valid");
1891         // simply free super frame
1892         free(super_frame);
1893         return;
1894     }
1895 
1896     property_get("persist.camera.snapshot_raw", value, "0");
1897     dump_raw = atoi(value) > 0 ? true : false;
1898 
1899     for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1900         if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
1901             mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
1902             if (NULL != stream) {
1903                 if (dump_raw) {
1904                     pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1905                 }
1906                 stream->bufDone(super_frame->bufs[i]->buf_idx);
1907             }
1908             break;
1909         }
1910     }
1911 
1912     free(super_frame);
1913 
1914     LOGH("[KPI Perf] : END");
1915 }
1916 
1917 /*===========================================================================
1918  * FUNCTION   : updateMetadata
1919  *
1920  * DESCRIPTION: Frame related parameter can be updated here
1921  *
1922  * PARAMETERS :
1923  *   @pMetaData : pointer to metadata buffer
1924  *
1925  * RETURN     : int32_t type of status
1926  *              NO_ERROR  -- success
1927  *              none-zero failure code
1928  *==========================================================================*/
int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
1930 {
1931     int32_t rc = NO_ERROR;
1932 
1933     if (pMetaData == NULL) {
1934         LOGE("Null Metadata buffer");
1935         return rc;
1936     }
1937 
1938     // Sharpness
1939     cam_edge_application_t edge_application;
1940     memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
1941     edge_application.sharpness = mParameters.getSharpness();
1942     if (edge_application.sharpness != 0) {
1943         edge_application.edge_mode = CAM_EDGE_MODE_FAST;
1944     } else {
1945         edge_application.edge_mode = CAM_EDGE_MODE_OFF;
1946     }
1947     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1948             CAM_INTF_META_EDGE_MODE, edge_application);
1949 
1950     //Effect
1951     int32_t prmEffect = mParameters.getEffect();
1952     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);
1953 
1954     //flip
1955     int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
1956     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);
1957 
1958     //denoise
1959     uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
1960     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1961             CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);
1962 
1963     //rotation & device rotation
1964     uint32_t prmRotation = mParameters.getJpegRotation();
1965     cam_rotation_info_t rotation_info;
1966     memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
1967     if (prmRotation == 0) {
1968        rotation_info.rotation = ROTATE_0;
1969     } else if (prmRotation == 90) {
1970        rotation_info.rotation = ROTATE_90;
1971     } else if (prmRotation == 180) {
1972        rotation_info.rotation = ROTATE_180;
1973     } else if (prmRotation == 270) {
1974        rotation_info.rotation = ROTATE_270;
1975     }
1976 
1977     uint32_t device_rotation = mParameters.getDeviceRotation();
1978     if (device_rotation == 0) {
1979         rotation_info.device_rotation = ROTATE_0;
1980     } else if (device_rotation == 90) {
1981         rotation_info.device_rotation = ROTATE_90;
1982     } else if (device_rotation == 180) {
1983         rotation_info.device_rotation = ROTATE_180;
1984     } else if (device_rotation == 270) {
1985         rotation_info.device_rotation = ROTATE_270;
1986     } else {
1987         rotation_info.device_rotation = ROTATE_0;
1988     }
1989 
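    // Both the requested JPEG rotation and the current device orientation are
    // delivered to the backend in a single CAM_INTF_PARM_ROTATION entry.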
1990     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);
1991 
1992     // Imglib Dynamic Scene Data
1993     cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
1994     if (mParameters.isStillMoreEnabled()) {
1995         cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
1996         dyn_img_data.input_count = stillmore_cap.burst_count;
1997     }
1998     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1999             CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);
2000 
2001     //CPP CDS
2002     int32_t prmCDSMode = mParameters.getCDSMode();
2003     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
2004             CAM_INTF_PARM_CDS_MODE, prmCDSMode);
2005 
2006     return rc;
2007 }
2008 
2009 /*===========================================================================
2010  * FUNCTION   : metadata_stream_cb_routine
2011  *
2012  * DESCRIPTION: helper function to handle metadata frame from metadata stream
2013  *
2014  * PARAMETERS :
2015  *   @super_frame : received super buffer
2016  *   @stream      : stream object
2017  *   @userdata    : user data ptr
2018  *
2019  * RETURN    : None
2020  *
2021  * NOTE      : caller passes the ownership of super_frame, it's our
2022  *             responsibility to free super_frame once it's done. Metadata
2023  *             could have valid entries for face detection result or
2024  *             histogram statistics information.
2025  *==========================================================================*/
void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2027                                                            QCameraStream * stream,
2028                                                            void * userdata)
2029 {
2030     ATRACE_CALL();
2031     LOGD("[KPI Perf] : BEGIN");
2032     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2033     if (pme == NULL ||
2034         pme->mCameraHandle == NULL ||
2035         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
2036         LOGE("camera obj not valid");
2037         // simply free super frame
2038         free(super_frame);
2039         return;
2040     }
2041 
2042     mm_camera_buf_def_t *frame = super_frame->bufs[0];
2043     metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
2044     if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
2045        !pme->mLongshotEnabled) {
2046        //Make shutter call back in non ZSL mode once raw frame is received from VFE.
2047        pme->playShutter();
2048     }
2049 
2050     if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
2051         //Dump Tuning data for video
2052         pme->dumpMetadataToFile(stream,frame,(char *)"Video");
2053     }
2054 
2055     IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
2056         // process histogram statistics info
2057         qcamera_sm_internal_evt_payload_t *payload =
2058             (qcamera_sm_internal_evt_payload_t *)
2059                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2060         if (NULL != payload) {
2061             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2062             payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
2063             payload->stats_data = *stats_data;
2064             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2065             if (rc != NO_ERROR) {
2066                 LOGW("processEvt histogram failed");
2067                 free(payload);
2068                 payload = NULL;
2069 
2070             }
2071         } else {
2072             LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
2073         }
2074     }
2075 
2076     IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
2077             CAM_INTF_META_FACE_DETECTION, pMetaData) {
2078 
2079         cam_faces_data_t faces_data;
2080         pme->fillFacesData(faces_data, pMetaData);
2081         faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
2082 
2083         qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2084             malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2085         if (NULL != payload) {
2086             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2087             payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
2088             payload->faces_data = faces_data;
2089             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2090             if (rc != NO_ERROR) {
2091                 LOGW("processEvt face detection failed");
2092                 free(payload);
2093                 payload = NULL;
2094             }
2095         } else {
2096             LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
2097         }
2098     }
2099 
2100     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
2101         uint8_t forceAFUpdate = FALSE;
2102         //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
2103         //upper layers. But in scenarios where metadata drops especially which contain important
2104         //AF information, APP will wait indefinitely for focus result resulting in capture hang.
2105         //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
2106         //This will help overcome metadata drop issue with the earlier approach.
2107         //3. But sometimes AF state transitions can happen so fast within same metadata due to
2108         //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
2109         //state depending on the state transitions happened (for example state A -> B -> A).
2110         //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
2111         //we check state transition at both HAL level and AF module level. We rely on
2112         //'state transition' meta field set by AF module for the state transition detected by it.
2113         IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
2114             forceAFUpdate = *stateChange;
2115         }
2116         //This is a special scenario in which when scene modes like landscape are selected, AF mode
2117         //gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
2118         //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
2119         //change here and trigger AF callback @ processAutoFocusEvent().
2120         IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2121             if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
2122                     pme->mActiveAF){
2123                 forceAFUpdate = TRUE;
2124             }
2125         }
2126         if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
2127             cam_af_state_t prevFocusState = pme->m_currentFocusState;
2128             pme->m_currentFocusState = (cam_af_state_t)(*afState);
2129             qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2130                     malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2131             if (NULL != payload) {
2132                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2133                 payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
2134                 payload->focus_data.focus_state = (cam_af_state_t)(*afState);
2135                 //Need to flush ZSL Q only if we are transitioning from scanning state
2136                 //to focused/not focused state.
2137                 payload->focus_data.flush_info.needFlush =
2138                         ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
2139                         (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
2140                         ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
2141                         (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
2142                 payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
2143 
2144                 IF_META_AVAILABLE(float, focusDistance,
2145                         CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
                    payload->focus_data.focus_dist.
                            focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
2148                 }
2149                 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
2150                     payload->focus_data.focus_dist.
2151                             focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
2152                     payload->focus_data.focus_dist.
2153                             focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
2154                 }
2155                 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2156                     payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
2157                 }
2158                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2159                 if (rc != NO_ERROR) {
2160                     LOGW("processEvt focus failed");
2161                     free(payload);
2162                     payload = NULL;
2163                 }
2164             } else {
2165                 LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
2166             }
2167         }
2168     }
2169 
2170     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
2171         if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
2172             LOGE("Invalid num_of_streams %d in crop_data",
2173                 crop_data->num_of_streams);
2174         } else {
2175             qcamera_sm_internal_evt_payload_t *payload =
2176                 (qcamera_sm_internal_evt_payload_t *)
2177                     malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2178             if (NULL != payload) {
2179                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2180                 payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
2181                 payload->crop_data = *crop_data;
2182                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2183                 if (rc != NO_ERROR) {
2184                     LOGE("processEvt crop info failed");
2185                     free(payload);
2186                     payload = NULL;
2187                 }
2188             } else {
                LOGE("No memory for crop info qcamera_sm_internal_evt_payload_t");
2190             }
2191         }
2192     }
2193 
2194     IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
2195             CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
2196         qcamera_sm_internal_evt_payload_t *payload =
2197         (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2198         if (NULL != payload) {
2199             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2200             payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
2201             payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
2202             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2203             if (rc != NO_ERROR) {
2204                 LOGW("processEvt prep_snapshot failed");
2205                 free(payload);
2206                 payload = NULL;
2207             }
2208         } else {
2209             LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
2210         }
2211     }
2212 
2213     IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
2214             CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
2215         LOGH("hdr_scene_data: %d %f\n",
2216                 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
2217         //Handle this HDR meta data only if capture is not in process
2218         if (!pme->m_stateMachine.isCaptureRunning()) {
2219             qcamera_sm_internal_evt_payload_t *payload =
2220                     (qcamera_sm_internal_evt_payload_t *)
2221                     malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2222             if (NULL != payload) {
2223                 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2224                 payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
2225                 payload->hdr_data = *hdr_scene_data;
2226                 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2227                 if (rc != NO_ERROR) {
2228                     LOGW("processEvt hdr update failed");
2229                     free(payload);
2230                     payload = NULL;
2231                 }
2232             } else {
2233                 LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
2234             }
2235         }
2236     }
2237 
2238     IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
2239             CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
2240         qcamera_sm_internal_evt_payload_t *payload =
2241             (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2242         if (NULL != payload) {
2243             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2244             payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
2245             payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
2246             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2247             if (rc != NO_ERROR) {
2248                 LOGW("processEvt asd_update failed");
2249                 free(payload);
2250                 payload = NULL;
2251             }
2252         } else {
2253             LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
2254         }
2255     }
2256 
2257     IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
        LOGH("metadata for awb params");
2259         qcamera_sm_internal_evt_payload_t *payload =
2260                 (qcamera_sm_internal_evt_payload_t *)
2261                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2262         if (NULL != payload) {
2263             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2264             payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
2265             payload->awb_data = *awb_params;
2266             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2267             if (rc != NO_ERROR) {
2268                 LOGW("processEvt awb_update failed");
2269                 free(payload);
2270                 payload = NULL;
2271             }
2272         } else {
2273             LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
2274         }
2275     }
2276 
2277     IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
2278         pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
2279     }
2280 
2281     IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
2282         pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
2283     }
2284 
2285     IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
2286         pme->mExifParams.sensor_params.aperture_value = *aperture_value;
2287     }
2288 
2289     IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
2290         pme->mExifParams.cam_3a_params = *ae_params;
2291         pme->mExifParams.cam_3a_params_valid = TRUE;
2292         pme->mFlashNeeded = ae_params->flash_needed;
2293         pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
2294         qcamera_sm_internal_evt_payload_t *payload =
2295                 (qcamera_sm_internal_evt_payload_t *)
2296                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2297         if (NULL != payload) {
2298             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2299             payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
2300             payload->ae_data = *ae_params;
2301             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2302             if (rc != NO_ERROR) {
2303                 LOGW("processEvt ae_update failed");
2304                 free(payload);
2305                 payload = NULL;
2306             }
2307         } else {
2308             LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
2309         }
2310     }
2311 
2312     IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
2313         pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
2314     }
2315 
2316     IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
2317         pme->mExifParams.sensor_params = *sensor_params;
2318     }
2319 
2320     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
2321             CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
2322         if (pme->mExifParams.debug_params) {
2323             pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
2324             pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
2325         }
2326     }
2327 
2328     IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
2329             CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
2330         if (pme->mExifParams.debug_params) {
2331             pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
2332             pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
2333         }
2334     }
2335 
2336     IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
2337             CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
2338         if (pme->mExifParams.debug_params) {
2339             pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
2340             pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
2341         }
2342     }
2343 
2344     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
2345             CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
2346         if (pme->mExifParams.debug_params) {
2347             pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
2348             pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
2349         }
2350     }
2351 
2352     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
2353             CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
2354         if (pme->mExifParams.debug_params) {
2355             pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
2356             pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
2357         }
2358     }
2359 
2360     IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
2361         qcamera_sm_internal_evt_payload_t *payload =
2362                 (qcamera_sm_internal_evt_payload_t *)
2363                 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2364         if (NULL != payload) {
2365             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2366             payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
2367             payload->led_data = (cam_flash_mode_t)*led_mode;
2368             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2369             if (rc != NO_ERROR) {
2370                 LOGW("processEvt led mode override failed");
2371                 free(payload);
2372                 payload = NULL;
2373             }
2374         } else {
            LOGE("No memory for led mode override qcamera_sm_internal_evt_payload_t");
2376         }
2377     }
2378 
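    // Mirror the current sharpness setting into this metadata buffer's edge
    // mode entry, using the same mapping as updateMetadata().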
2379     cam_edge_application_t edge_application;
2380     memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
2381     edge_application.sharpness = pme->mParameters.getSharpness();
2382     if (edge_application.sharpness != 0) {
2383         edge_application.edge_mode = CAM_EDGE_MODE_FAST;
2384     } else {
2385         edge_application.edge_mode = CAM_EDGE_MODE_OFF;
2386     }
2387     ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
2388 
2389     IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
2390             CAM_INTF_META_FOCUS_POSITION, pMetaData) {
2391         qcamera_sm_internal_evt_payload_t *payload =
2392             (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2393         if (NULL != payload) {
2394             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2395             payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
2396             payload->focus_pos = *cur_pos_info;
2397             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2398             if (rc != NO_ERROR) {
2399                 LOGW("processEvt focus_pos_update failed");
2400                 free(payload);
2401                 payload = NULL;
2402             }
2403         } else {
2404             LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
2405         }
2406     }
2407 
2408     if (pme->mParameters.getLowLightCapture()) {
2409         IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
2410                 CAM_INTF_META_LOW_LIGHT, pMetaData) {
2411             pme->mParameters.setLowLightLevel(*low_light_level);
2412         }
2413     }
2414 
2415     IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
2416             CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
2417         pme->mParameters.setDynamicImgData(*dyn_img_data);
2418     }
2419 
2420     IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
2421       LOGD("touch_ae_status: %d", *touch_ae_status);
2422     }
2423 
2424     stream->bufDone(frame->buf_idx);
2425     free(super_frame);
2426 
2427     LOGD("[KPI Perf] : END");
2428 }
2429 
2430 /*===========================================================================
2431  * FUNCTION   : reprocess_stream_cb_routine
2432  *
2433  * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
 *              (after reprocess, e.g., ZSL snapshot frame after WNR if
2435  *              WNR is enabled)
2436  *
2437  * PARAMETERS :
2438  *   @super_frame : received super buffer
2439  *   @stream      : stream object
2440  *   @userdata    : user data ptr
2441  *
2442  * RETURN    : None
2443  *
2444  * NOTE      : caller passes the ownership of super_frame, it's our
2445  *             responsibility to free super_frame once it's done. In this
2446  *             case, reprocessed frame need to be passed to postprocessor
2447  *             for jpeg encoding.
2448  *==========================================================================*/
void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2450                                                             QCameraStream * /*stream*/,
2451                                                             void * userdata)
2452 {
2453     ATRACE_CALL();
2454     LOGH("[KPI Perf]: E");
2455     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2456     if (pme == NULL ||
2457         pme->mCameraHandle == NULL ||
2458         pme->mCameraHandle->camera_handle != super_frame->camera_handle){
2459         LOGE("camera obj not valid");
2460         // simply free super frame
2461         free(super_frame);
2462         return;
2463     }
2464 
2465     pme->m_postprocessor.processPPData(super_frame);
2466 
2467     LOGH("[KPI Perf]: X");
2468 }
2469 
2470 /*===========================================================================
2471  * FUNCTION   : callback_stream_cb_routine
2472  *
 * DESCRIPTION: function to process CALLBACK stream data.
 *              Frame will be processed and sent to the framework.
2475  *
2476  * PARAMETERS :
2477  *   @super_frame : received super buffer
2478  *   @stream      : stream object
2479  *   @userdata    : user data ptr
2480  *
2481  * RETURN    : None
2482  *==========================================================================*/
void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
2484         QCameraStream *stream, void *userdata)
2485 {
2486     ATRACE_CALL();
2487     LOGH("[KPI Perf]: E");
2488     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2489 
2490     if (pme == NULL ||
2491             pme->mCameraHandle == NULL ||
2492             pme->mCameraHandle->camera_handle != super_frame->camera_handle) {
2493         LOGE("camera obj not valid");
2494         // simply free super frame
2495         free(super_frame);
2496         return;
2497     }
2498 
2499     mm_camera_buf_def_t *frame = super_frame->bufs[0];
2500     if (NULL == frame) {
2501         LOGE("preview callback frame is NULL");
2502         free(super_frame);
2503         return;
2504     }
2505 
2506     if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
2507         LOGH("preview is not running, no need to process");
2508         stream->bufDone(frame->buf_idx);
2509         free(super_frame);
2510         return;
2511     }
2512 
2513     QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
2514     // Handle preview data callback
2515     if (pme->mDataCb != NULL &&
2516             (pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) &&
2517             (!pme->mParameters.isSceneSelectionEnabled())) {
2518         int32_t rc = pme->sendPreviewCallback(stream, previewMemObj, frame->buf_idx);
2519         if (NO_ERROR != rc) {
            LOGE("Preview callback was not sent successfully");
2521         }
2522     }
2523     stream->bufDone(frame->buf_idx);
2524     free(super_frame);
2525     LOGH("[KPI Perf]: X");
2526 }
2527 
2528 /*===========================================================================
 * FUNCTION   : dumpJpegToFile
2530  *
2531  * DESCRIPTION: helper function to dump jpeg into file for debug purpose.
2532  *
2533  * PARAMETERS :
2534  *    @data : data ptr
2535  *    @size : length of data buffer
2536  *    @index : identifier for data
2537  *
2538  * RETURN     : None
2539  *==========================================================================*/
void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
2541         size_t size, uint32_t index)
2542 {
2543     char value[PROPERTY_VALUE_MAX];
2544     property_get("persist.camera.dumpimg", value, "0");
2545     uint32_t enabled = (uint32_t) atoi(value);
2546     uint32_t frm_num = 0;
2547     uint32_t skip_mode = 0;
2548 
2549     char buf[32];
2550     cam_dimension_t dim;
2551     memset(buf, 0, sizeof(buf));
2552     memset(&dim, 0, sizeof(dim));
2553 
2554     if(((enabled & QCAMERA_DUMP_FRM_JPEG) && data) ||
2555         ((true == m_bIntJpegEvtPending) && data)) {
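        // persist.camera.dumpimg is a packed bit field: bits [31:16] give the
        // number of frames to dump, bits [15:8] give the skip interval, and
        // the low bits enable the individual QCAMERA_DUMP_FRM_* types.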
2556         frm_num = ((enabled & 0xffff0000) >> 16);
2557         if(frm_num == 0) {
2558             frm_num = 10; //default 10 frames
2559         }
2560         if(frm_num > 256) {
2561             frm_num = 256; //256 buffers cycle around
2562         }
2563         skip_mode = ((enabled & 0x0000ff00) >> 8);
2564         if(skip_mode == 0) {
2565             skip_mode = 1; //no-skip
2566         }
2567 
2568         if( mDumpSkipCnt % skip_mode == 0) {
2569             if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
2570                 // reset frame count if cycling
2571                 mDumpFrmCnt = 0;
2572             }
2573             if (mDumpFrmCnt <= frm_num) {
2574                 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg",
2575                         mDumpFrmCnt, index);
2576                 if (true == m_bIntJpegEvtPending) {
2577                     strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH);
2578                     mBackendFileSize = size;
2579                 }
2580 
2581                 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
2582                 if (file_fd >= 0) {
2583                     ssize_t written_len = write(file_fd, data, size);
2584                     fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
2585                     LOGH("written number of bytes %zd\n",
2586                              written_len);
2587                     close(file_fd);
2588                 } else {
2589                     LOGE("failed to open file for image dumping");
2590                 }
2591                 if (false == m_bIntJpegEvtPending) {
2592                     mDumpFrmCnt++;
2593                 }
2594             }
2595         }
2596         mDumpSkipCnt++;
2597     }
2598 }
2599 
2600 
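/*===========================================================================
 * FUNCTION   : dumpMetadataToFile
 *
 * DESCRIPTION: helper function to dump the tuning portion of a metadata
 *              buffer into a file for debug purposes, controlled by the
 *              persist.camera.dumpmetadata property.
 *
 * PARAMETERS :
 *    @stream : stream object the metadata buffer belongs to
 *    @frame  : metadata frame buffer
 *    @type   : string tag used in the dump file name
 *
 * RETURN     : None
 *==========================================================================*/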
2601 void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
2602                                                    mm_camera_buf_def_t *frame, char *type)
2603 {
2604     char value[PROPERTY_VALUE_MAX];
2605     uint32_t frm_num = 0;
2606     metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer;
2607     property_get("persist.camera.dumpmetadata", value, "0");
2608     uint32_t enabled = (uint32_t) atoi(value);
2609     if (stream == NULL) {
2610         LOGH("No op");
2611         return;
2612     }
2613 
2614     uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
2615     if(enabled){
2616         frm_num = ((enabled & 0xffff0000) >> 16);
2617         if (frm_num == 0) {
2618             frm_num = 10; //default 10 frames
2619         }
2620         if (frm_num > 256) {
2621             frm_num = 256; //256 buffers cycle around
2622         }
2623         if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2624             // reset frame count if cycling
2625             dumpFrmCnt = 0;
2626         }
2627         LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
2628         if (dumpFrmCnt < frm_num) {
2629             char timeBuf[128];
2630             char buf[32];
2631             memset(buf, 0, sizeof(buf));
2632             memset(timeBuf, 0, sizeof(timeBuf));
2633             time_t current_time;
2634             struct tm * timeinfo;
2635             time (&current_time);
2636             timeinfo = localtime (&current_time);
2637             if (NULL != timeinfo) {
2638                 strftime(timeBuf, sizeof(timeBuf),
2639                         QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2640             }
2641             String8 filePath(timeBuf);
2642             snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
2643             filePath.append(buf);
2644             int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2645             if (file_fd >= 0) {
2646                 ssize_t written_len = 0;
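                // Dump layout: tuning_data_version first, then the sensor/VFE/
                // CPP/CAC size fields, then the raw tuning blobs read from
                // fixed offsets inside tuning_params.data.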
2647                 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
2648                 void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
2649                 written_len += write(file_fd, data, sizeof(uint32_t));
2650                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
2651                 LOGH("tuning_sensor_data_size %d",(int)(*(int *)data));
2652                 written_len += write(file_fd, data, sizeof(uint32_t));
2653                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
2654                 LOGH("tuning_vfe_data_size %d",(int)(*(int *)data));
2655                 written_len += write(file_fd, data, sizeof(uint32_t));
2656                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
2657                 LOGH("tuning_cpp_data_size %d",(int)(*(int *)data));
2658                 written_len += write(file_fd, data, sizeof(uint32_t));
2659                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
2660                 LOGH("tuning_cac_data_size %d",(int)(*(int *)data));
2661                 written_len += write(file_fd, data, sizeof(uint32_t));
2662                 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2);
2663                 LOGH("tuning_cac_data_size2 %d",(int)(*(int *)data));
2664                 written_len += write(file_fd, data, sizeof(uint32_t));
2665                 size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
2666                 data = (void *)((uint8_t *)&metadata->tuning_params.data);
2667                 written_len += write(file_fd, data, total_size);
2668                 total_size = metadata->tuning_params.tuning_vfe_data_size;
2669                 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
2670                 written_len += write(file_fd, data, total_size);
2671                 total_size = metadata->tuning_params.tuning_cpp_data_size;
2672                 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
2673                 written_len += write(file_fd, data, total_size);
2674                 total_size = metadata->tuning_params.tuning_cac_data_size;
2675                 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
2676                 written_len += write(file_fd, data, total_size);
2677                 close(file_fd);
2678             } else {
2679                 LOGE("failed to open file for image dumping");
2680             }
2681             dumpFrmCnt++;
2682         }
2683     }
2684     stream->mDumpMetaFrame = dumpFrmCnt;
2685 }
2686 /*===========================================================================
2687  * FUNCTION   : dumpFrameToFile
2688  *
2689  * DESCRIPTION: helper function to dump frame into file for debug purpose.
2690  *
2691  * PARAMETERS :
2692  *    @stream : stream object the frame belongs to
2693  *    @frame : frame buffer to be dumped
2694  *    @dump_type : type of the frame to be dumped. Only when this
2695  *                 dump type is enabled will the frame be
2696  *                 dumped into a file.
2697  *    @misc : optional string appended to the dump file name
2698  *
2699  * RETURN     : None
2700  *==========================================================================*/
2701 void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
2702         mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc)
2703 {
2704     char value[PROPERTY_VALUE_MAX];
2705     property_get("persist.camera.dumpimg", value, "0");
2706     uint32_t enabled = (uint32_t) atoi(value);
2707     uint32_t frm_num = 0;
2708     uint32_t skip_mode = 0;
2709 
2710     if (NULL == stream) {
2711         LOGE("stream object is null");
2712         return;
2713     }
2714 
2715     uint32_t dumpFrmCnt = stream->mDumpFrame;
2716 
2717     if (true == m_bIntRawEvtPending) {
2718         enabled = QCAMERA_DUMP_FRM_RAW;
2719     }
2720 
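    // persist.camera.dumpimg uses the same packing as in dumpJpegToFile:
    // dump-type mask, skip mode and frame count are all encoded in 'enabled'.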
2721     if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) {
2722         if((enabled & dump_type) && stream && frame) {
2723             frm_num = ((enabled & 0xffff0000) >> 16);
2724             if(frm_num == 0) {
2725                 frm_num = 10; //default 10 frames
2726             }
2727             if(frm_num > 256) {
2728                 frm_num = 256; //256 buffers cycle around
2729             }
2730             skip_mode = ((enabled & 0x0000ff00) >> 8);
2731             if(skip_mode == 0) {
2732                 skip_mode = 1; //no-skip
2733             }
2734             if(stream->mDumpSkipCnt == 0)
2735                 stream->mDumpSkipCnt = 1;
2736 
2737             if( stream->mDumpSkipCnt % skip_mode == 0) {
2738                 if((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2739                     // reset frame count if cycling
2740                     dumpFrmCnt = 0;
2741                 }
2742                 if (dumpFrmCnt <= frm_num) {
2743                     char buf[32];
2744                     char timeBuf[128];
2745                     time_t current_time;
2746                     struct tm * timeinfo;
2747 
2748                     memset(timeBuf, 0, sizeof(timeBuf));
2749 
2750                     time (&current_time);
2751                     timeinfo = localtime (&current_time);
2752                     memset(buf, 0, sizeof(buf));
2753 
2754                     cam_dimension_t dim;
2755                     memset(&dim, 0, sizeof(dim));
2756                     stream->getFrameDimension(dim);
2757 
2758                     cam_frame_len_offset_t offset;
2759                     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2760                     stream->getFrameOffset(offset);
2761 
2762                     if (NULL != timeinfo) {
2763                         strftime(timeBuf, sizeof(timeBuf),
2764                                 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2765                     }
2766                     String8 filePath(timeBuf);
2767                     switch (dump_type) {
2768                     case QCAMERA_DUMP_FRM_PREVIEW:
2769                         {
2770                             snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
2771                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2772                         }
2773                         break;
2774                     case QCAMERA_DUMP_FRM_THUMBNAIL:
2775                         {
2776                             snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
2777                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2778                         }
2779                         break;
2780                     case QCAMERA_DUMP_FRM_SNAPSHOT:
2781                         {
2782                             if (!mParameters.isPostProcScaling()) {
2783                                 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2784                             } else {
2785                                 stream->getFrameDimension(dim);
2786                             }
2787                             if (misc != NULL) {
2788                                 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv",
2789                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2790                             } else {
2791                                 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
2792                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2793                             }
2794                         }
2795                         break;
2796                     case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
2797                         {
2798                             stream->getFrameDimension(dim);
2799                             if (misc != NULL) {
2800                                 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv",
2801                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2802                             } else {
2803                                 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv",
2804                                         dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2805                             }
2806                         }
2807                         break;
2808                     case QCAMERA_DUMP_FRM_VIDEO:
2809                         {
2810                             snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
2811                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2812                         }
2813                         break;
2814                     case QCAMERA_DUMP_FRM_RAW:
2815                         {
2816                             mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
2817                             snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
2818                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2819                         }
2820                         break;
2821                     case QCAMERA_DUMP_FRM_JPEG:
2822                         {
2823                             mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2824                             snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv",
2825                                     dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2826                         }
2827                         break;
2828                     default:
2829                         LOGE("Not supported for dumping stream type %d",
2830                                dump_type);
2831                         return;
2832                     }
2833 
2834                     filePath.append(buf);
2835                     int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2836                     ssize_t written_len = 0;
2837                     if (file_fd >= 0) {
2838                         void *data = NULL;
2839 
2840                         fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
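                        // Write each plane: optional meta_len bytes first, then
                        // one row of 'width' bytes per line, advancing the read
                        // index by 'stride' so any padding between rows is skipped.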
2841                         for (uint32_t i = 0; i < offset.num_planes; i++) {
2842                             uint32_t index = offset.mp[i].offset;
2843                             if (i > 0) {
2844                                 index += offset.mp[i-1].len;
2845                             }
2846 
2847                             if (offset.mp[i].meta_len != 0) {
2848                                 data = (void *)((uint8_t *)frame->buffer + index);
2849                                 written_len += write(file_fd, data,
2850                                         (size_t)offset.mp[i].meta_len);
2851                                 index += (uint32_t)offset.mp[i].meta_len;
2852                             }
2853 
2854                             for (int j = 0; j < offset.mp[i].height; j++) {
2855                                 data = (void *)((uint8_t *)frame->buffer + index);
2856                                 written_len += write(file_fd, data,
2857                                         (size_t)offset.mp[i].width);
2858                                 index += (uint32_t)offset.mp[i].stride;
2859                             }
2860                         }
2861 
2862                         LOGH("written number of bytes %zd\n",
2863                              written_len);
2864                         close(file_fd);
2865                     } else {
2866                         LOGE("failed to open file for image dumping");
2867                     }
2868                     if (true == m_bIntRawEvtPending) {
2869                         strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH);
2870                         mBackendFileSize = (size_t)written_len;
2871                     } else {
2872                         dumpFrmCnt++;
2873                     }
2874                 }
2875             }
2876             stream->mDumpSkipCnt++;
2877         }
2878     } else {
2879         dumpFrmCnt = 0;
2880     }
2881     stream->mDumpFrame = dumpFrmCnt;
2882 }
2883 
2884 /*===========================================================================
2885  * FUNCTION   : debugShowVideoFPS
2886  *
2887  * DESCRIPTION: helper function to log video frame FPS for debug purpose.
2888  *
2889  * PARAMETERS : None
2890  *
2891  * RETURN     : None
2892  *==========================================================================*/
2893 void QCamera2HardwareInterface::debugShowVideoFPS()
2894 {
2895     mVFrameCount++;
2896     nsecs_t now = systemTime();
2897     nsecs_t diff = now - mVLastFpsTime;
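    // Recompute FPS roughly every 250 ms: frames received in the window,
    // scaled by one second (in ns) over the measured interval.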
2898     if (diff > ms2ns(250)) {
2899         mVFps = (((double)(mVFrameCount - mVLastFrameCount)) *
2900                 (double)(s2ns(1))) / (double)diff;
2901         LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d",
2902                 mVFps, mCameraId);
2903         mVLastFpsTime = now;
2904         mVLastFrameCount = mVFrameCount;
2905     }
2906 }
2907 
2908 /*===========================================================================
2909  * FUNCTION   : debugShowPreviewFPS
2910  *
2911  * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
2912  *
2913  * PARAMETERS : None
2914  *
2915  * RETURN     : None
2916  *==========================================================================*/
2917 void QCamera2HardwareInterface::debugShowPreviewFPS()
2918 {
2919     mPFrameCount++;
2920     nsecs_t now = systemTime();
2921     nsecs_t diff = now - mPLastFpsTime;
2922     if (diff > ms2ns(250)) {
2923         mPFps = (((double)(mPFrameCount - mPLastFrameCount)) *
2924                 (double)(s2ns(1))) / (double)diff;
2925         LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d",
2926                  mPFps, mCameraId);
2927         mPLastFpsTime = now;
2928         mPLastFrameCount = mPFrameCount;
2929     }
2930 }
2931 
2932 /*===========================================================================
2933  * FUNCTION   : fillFacesData
2934  *
2935  * DESCRIPTION: helper function to fill in face related metadata into a struct.
2936  *
2937  * PARAMETERS :
2938  *   @faces_data : face features data to be filled
2939  *   @metadata   : metadata structure to read face features from
2940  *
2941  * RETURN     : None
2942  *==========================================================================*/
2943 void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
2944         metadata_buffer_t *metadata)
2945 {
2946     memset(&faces_data, 0, sizeof(cam_faces_data_t));
2947 
2948     IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
2949             CAM_INTF_META_FACE_DETECTION, metadata) {
2950         faces_data.detection_data = *p_detection_data;
2951         if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
2952             faces_data.detection_data.num_faces_detected = MAX_ROI;
2953         }
2954 
2955         LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
2956                 faces_data.detection_data.num_faces_detected);
2957 
2958         IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
2959                 CAM_INTF_META_FACE_RECOG, metadata) {
2960             faces_data.recog_valid = true;
2961             faces_data.recog_data = *p_recog_data;
2962         }
2963 
2964         IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
2965                 CAM_INTF_META_FACE_BLINK, metadata) {
2966             faces_data.blink_valid = true;
2967             faces_data.blink_data = *p_blink_data;
2968         }
2969 
2970         IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
2971                 CAM_INTF_META_FACE_GAZE, metadata) {
2972             faces_data.gaze_valid = true;
2973             faces_data.gaze_data = *p_gaze_data;
2974         }
2975 
2976         IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
2977                 CAM_INTF_META_FACE_SMILE, metadata) {
2978             faces_data.smile_valid = true;
2979             faces_data.smile_data = *p_smile_data;
2980         }
2981 
2982         IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
2983                 CAM_INTF_META_FACE_LANDMARK, metadata) {
2984             faces_data.landmark_valid = true;
2985             faces_data.landmark_data = *p_landmarks;
2986         }
2987 
2988         IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
2989                 CAM_INTF_META_FACE_CONTOUR, metadata) {
2990             faces_data.contour_valid = true;
2991             faces_data.contour_data = *p_contour;
2992         }
2993     }
2994 }
2995 
2996 /*===========================================================================
2997  * FUNCTION   : ~QCameraCbNotifier
2998  *
2999  * DESCRIPTION: Destructor for exiting the callback context.
3000  *
3001  * PARAMETERS : None
3002  *
3003  * RETURN     : None
3004  *==========================================================================*/
3005 QCameraCbNotifier::~QCameraCbNotifier()
3006 {
3007 }
3008 
3009 /*===========================================================================
3010  * FUNCTION   : exit
3011  *
3012  * DESCRIPTION: exit notify thread.
3013  *
3014  * PARAMETERS : None
3015  *
3016  * RETURN     : None
3017  *==========================================================================*/
3018 void QCameraCbNotifier::exit()
3019 {
3020     mActive = false;
3021     mProcTh.exit();
3022 }
3023 
3024 /*===========================================================================
3025  * FUNCTION   : releaseNotifications
3026  *
3027  * DESCRIPTION: callback for releasing data stored in the callback queue.
3028  *
3029  * PARAMETERS :
3030  *   @data      : data to be released
3031  *   @user_data : context data
3032  *
3033  * RETURN     : None
3034  *==========================================================================*/
3035 void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
3036 {
3037     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3038 
3039     if ( ( NULL != arg ) && ( NULL != user_data ) ) {
3040         if ( arg->release_cb ) {
3041             arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION);
3042         }
3043     }
3044 }
3045 
3046 /*===========================================================================
3047  * FUNCTION   : matchSnapshotNotifications
3048  *
3049  * DESCRIPTION: matches snapshot data callbacks
3050  *
3051  * PARAMETERS :
3052  *   @data      : data to match
3053  *   @user_data : context data
3054  *
3055  * RETURN     : bool match
3056  *              true - match found
3057  *              false- match not found
3058  *==========================================================================*/
3059 bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
3060                                                    void */*user_data*/)
3061 {
3062     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3063     if ( NULL != arg ) {
3064         if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
3065             return true;
3066         }
3067     }
3068 
3069     return false;
3070 }
3071 
3072 /*===========================================================================
3073  * FUNCTION   : matchPreviewNotifications
3074  *
3075  * DESCRIPTION: matches preview data callbacks
3076  *
3077  * PARAMETERS :
3078  *   @data      : data to match
3079  *   @user_data : context data
3080  *
3081  * RETURN     : bool match
3082  *              true - match found
3083  *              false- match not found
3084  *==========================================================================*/
3085 bool QCameraCbNotifier::matchPreviewNotifications(void *data,
3086         void */*user_data*/)
3087 {
3088     qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3089     if (NULL != arg) {
3090         if ((QCAMERA_DATA_CALLBACK == arg->cb_type) &&
3091                 (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) {
3092             return true;
3093         }
3094     }
3095 
3096     return false;
3097 }
3098 
3099 /*===========================================================================
3100  * FUNCTION   : cbNotifyRoutine
3101  *
3102  * DESCRIPTION: callback thread which dispatches queued notifications to
3103  *              the upper layers based on the received commands.
3104  *
3105  * PARAMETERS :
3106  *   @data    : context data
3107  *
3108  * RETURN     : None
3109  *==========================================================================*/
3110 void * QCameraCbNotifier::cbNotifyRoutine(void * data)
3111 {
3112     int running = 1;
3113     int ret;
3114     QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
3115     QCameraCmdThread *cmdThread = &pme->mProcTh;
3116     cmdThread->setName("CAM_cbNotify");
3117     uint8_t isSnapshotActive = FALSE;
3118     bool longShotEnabled = false;
3119     uint32_t numOfSnapshotExpected = 0;
3120     uint32_t numOfSnapshotRcvd = 0;
3121     int32_t cbStatus = NO_ERROR;
3122 
3123     LOGD("E");
3124     do {
3125         do {
3126             ret = cam_sem_wait(&cmdThread->cmd_sem);
3127             if (ret != 0 && errno != EINVAL) {
3128                 LOGD("cam_sem_wait error (%s)",
3129                             strerror(errno));
3130                 return NULL;
3131             }
3132         } while (ret != 0);
3133 
3134         camera_cmd_type_t cmd = cmdThread->getCmd();
3135         LOGD("get cmd %d", cmd);
3136         switch (cmd) {
3137         case CAMERA_CMD_TYPE_START_DATA_PROC:
3138             {
3139                 isSnapshotActive = TRUE;
3140                 numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
3141                 longShotEnabled = pme->mParent->isLongshotEnabled();
3142                 LOGD("Num Snapshots Expected = %d",
3143                    numOfSnapshotExpected);
3144                 numOfSnapshotRcvd = 0;
3145             }
3146             break;
3147         case CAMERA_CMD_TYPE_STOP_DATA_PROC:
3148             {
3149                 pme->mDataQ.flushNodes(matchSnapshotNotifications);
3150                 isSnapshotActive = FALSE;
3151 
3152                 numOfSnapshotExpected = 0;
3153                 numOfSnapshotRcvd = 0;
3154             }
3155             break;
3156         case CAMERA_CMD_TYPE_DO_NEXT_JOB:
3157             {
3158                 qcamera_callback_argm_t *cb =
3159                     (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
3160                 cbStatus = NO_ERROR;
3161                 if (NULL != cb) {
3162                     LOGD("cb type %d received",
3163                           cb->cb_type);
3164 
3165                     if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
3166                         switch (cb->cb_type) {
3167                         case QCAMERA_NOTIFY_CALLBACK:
3168                             {
3169                                 if (cb->msg_type == CAMERA_MSG_FOCUS) {
3170                                     KPI_ATRACE_INT("Camera:AutoFocus", 0);
3171                                     LOGH("[KPI Perf] : PROFILE_SENDING_FOCUS_EVT_TO APP");
3172                                 }
3173                                 if (pme->mNotifyCb) {
3174                                     pme->mNotifyCb(cb->msg_type,
3175                                                   cb->ext1,
3176                                                   cb->ext2,
3177                                                   pme->mCallbackCookie);
3178                                 } else {
3179                                     LOGE("notify callback not set!");
3180                                 }
3181                                 if (cb->release_cb) {
3182                                     cb->release_cb(cb->user_data, cb->cookie,
3183                                             cbStatus);
3184                                 }
3185                             }
3186                             break;
3187                         case QCAMERA_DATA_CALLBACK:
3188                             {
3189                                 if (pme->mDataCb) {
3190                                     pme->mDataCb(cb->msg_type,
3191                                                  cb->data,
3192                                                  cb->index,
3193                                                  cb->metadata,
3194                                                  pme->mCallbackCookie);
3195                                 } else {
3196                                     LOGE("data callback not set!");
3197                                 }
3198                                 if (cb->release_cb) {
3199                                     cb->release_cb(cb->user_data, cb->cookie,
3200                                             cbStatus);
3201                                 }
3202                             }
3203                             break;
3204                         case QCAMERA_DATA_TIMESTAMP_CALLBACK:
3205                             {
3206                                 if(pme->mDataCbTimestamp) {
3207                                     pme->mDataCbTimestamp(cb->timestamp,
3208                                                           cb->msg_type,
3209                                                           cb->data,
3210                                                           cb->index,
3211                                                           pme->mCallbackCookie);
3212                                 } else {
3213                                     LOGE("Timestamp data callback not set!");
3214                                 }
3215                                 if (cb->release_cb) {
3216                                     cb->release_cb(cb->user_data, cb->cookie,
3217                                             cbStatus);
3218                                 }
3219                             }
3220                             break;
3221                         case QCAMERA_DATA_SNAPSHOT_CALLBACK:
3222                             {
3223                                 if (TRUE == isSnapshotActive && pme->mDataCb ) {
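                                    // In longshot mode the total number of
                                    // snapshots is open-ended, so completion is
                                    // not tracked here.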
3224                                     if (!longShotEnabled) {
3225                                         numOfSnapshotRcvd++;
3226                                         LOGI("Num Snapshots Received = %d Expected = %d",
3227                                                 numOfSnapshotRcvd, numOfSnapshotExpected);
3228                                         if (numOfSnapshotExpected > 0 &&
3229                                            (numOfSnapshotExpected == numOfSnapshotRcvd)) {
3230                                             LOGI("Received all snapshots");
3231                                             // notify HWI that snapshot is done
3232                                             pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
3233                                                                          NULL);
3234                                         }
3235                                     }
3236                                     if (pme->mJpegCb) {
3237                                         LOGI("Calling JPEG Callback!! for camera %d "
3238                                                 "release_data %p "
3239                                                 "frame_idx %d",
3240                                                 pme->mParent->getCameraId(),
3241                                                 cb->user_data,
3242                                                 cb->frame_index);
3243                                         pme->mJpegCb(cb->msg_type, cb->data,
3244                                                 cb->index, cb->metadata,
3245                                                 pme->mJpegCallbackCookie,
3246                                                 cb->frame_index, cb->release_cb,
3247                                                 cb->cookie, cb->user_data);
3248                                         // In case of a non-NULL JPEG callback, ownership
3249                                         // of the buffer is transferred to the muxer, so
3250                                         // release_cb must not be called here; the muxer
3251                                         // will release the buffer once it is done
3252                                         // processing it.
3253                                     }
3254                                     else {
3255                                         pme->mDataCb(cb->msg_type, cb->data, cb->index,
3256                                                 cb->metadata, pme->mCallbackCookie);
3257                                         if (cb->release_cb) {
3258                                             cb->release_cb(cb->user_data, cb->cookie,
3259                                                     cbStatus);
3260                                         }
3261                                     }
3262                                 }
3263                             }
3264                             break;
3265                         default:
3266                             {
3267                                 LOGE("invalid cb type %d",
3268                                       cb->cb_type);
3269                                 cbStatus = BAD_VALUE;
3270                                 if (cb->release_cb) {
3271                                     cb->release_cb(cb->user_data, cb->cookie,
3272                                             cbStatus);
3273                                 }
3274                             }
3275                             break;
3276                         };
3277                     } else {
3278                         LOGE("cb message type %d not enabled!",
3279                               cb->msg_type);
3280                         cbStatus = INVALID_OPERATION;
3281                         if (cb->release_cb) {
3282                             cb->release_cb(cb->user_data, cb->cookie, cbStatus);
3283                         }
3284                     }
3285                     delete cb;
3286                 } else {
3287                     LOGE("invalid cb type passed");
3288                 }
3289             }
3290             break;
3291         case CAMERA_CMD_TYPE_EXIT:
3292             {
3293                 running = 0;
3294                 pme->mDataQ.flush();
3295             }
3296             break;
3297         default:
3298             break;
3299         }
3300     } while (running);
3301     LOGD("X");
3302 
3303     return NULL;
3304 }
3305 
3306 /*===========================================================================
3307  * FUNCTION   : notifyCallback
3308  *
3309  * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
3310  *
3311  * PARAMETERS :
3312  *   @cbArgs  : callback arguments
3313  *
3314  * RETURN     : int32_t type of status
3315  *              NO_ERROR  -- success
3316  *              non-zero failure code
3317  *==========================================================================*/
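/* Illustrative usage sketch: the fields below are the ones cbNotifyRoutine
 * itself reads; the m_cbNotifier member name on the owning HWI object is an
 * assumption.
 *
 *     qcamera_callback_argm_t cbArg;
 *     memset(&cbArg, 0, sizeof(cbArg));
 *     cbArg.cb_type  = QCAMERA_NOTIFY_CALLBACK;
 *     cbArg.msg_type = CAMERA_MSG_FOCUS;
 *     cbArg.ext1     = true;   // e.g. AF success
 *     cbArg.ext2     = 0;
 *     m_cbNotifier.notifyCallback(cbArg);  // queued, dispatched on CAM_cbNotify
 */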
3318 int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
3319 {
3320     if (!mActive) {
3321         LOGE("notify thread is not active");
3322         return UNKNOWN_ERROR;
3323     }
3324 
3325     qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
3326     if (NULL == cbArg) {
3327         LOGE("no mem for qcamera_callback_argm_t");
3328         return NO_MEMORY;
3329     }
3330     memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
3331     *cbArg = cbArgs;
3332 
3333     if (mDataQ.enqueue((void *)cbArg)) {
3334         return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
3335     } else {
3336         LOGE("Error adding cb data into queue");
3337         delete cbArg;
3338         return UNKNOWN_ERROR;
3339     }
3340 }
3341 
3342 /*===========================================================================
3343  * FUNCTION   : setCallbacks
3344  *
3345  * DESCRIPTION: Initializes the callback functions, which would be used for
3346  *              communication with the upper layers and launches the callback
3347  *              context in which the callbacks will occur.
3348  *
3349  * PARAMETERS :
3350  *   @notifyCb          : notification callback
3351  *   @dataCb            : data callback
3352  *   @dataCbTimestamp   : data with timestamp callback
3353  *   @callbackCookie    : callback context data
3354  *
3355  * RETURN     : None
3356  *==========================================================================*/
3357 void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
3358                                      camera_data_callback dataCb,
3359                                      camera_data_timestamp_callback dataCbTimestamp,
3360                                      void *callbackCookie)
3361 {
3362     if ( ( NULL == mNotifyCb ) &&
3363          ( NULL == mDataCb ) &&
3364          ( NULL == mDataCbTimestamp ) &&
3365          ( NULL == mCallbackCookie ) ) {
3366         mNotifyCb = notifyCb;
3367         mDataCb = dataCb;
3368         mDataCbTimestamp = dataCbTimestamp;
3369         mCallbackCookie = callbackCookie;
3370         mActive = true;
3371         mProcTh.launch(cbNotifyRoutine, this);
3372     } else {
3373         LOGE("Camera callback notifier already initialized!");
3374     }
3375 }
3376 
3377 /*===========================================================================
3378  * FUNCTION   : setJpegCallBacks
3379  *
3380  * DESCRIPTION: Initializes the JPEG callback function, which is used to
3381  *              deliver encoded JPEG data directly to the upper layers.
3382  *
3383  *
3384  * PARAMETERS :
3385  *   @jpegCb          : JPEG data callback
3386  *   @callbackCookie  : callback context data
3387  *
3388  * RETURN     : None
3389  *==========================================================================*/
3390 void QCameraCbNotifier::setJpegCallBacks(
3391         jpeg_data_callback jpegCb, void *callbackCookie)
3392 {
3393     LOGH("Setting JPEG Callback notifier");
3394     mJpegCb        = jpegCb;
3395     mJpegCallbackCookie  = callbackCookie;
3396 }
3397 
3398 /*===========================================================================
3399  * FUNCTION   : flushPreviewNotifications
3400  *
3401  * DESCRIPTION: flush all pending preview notifications
3402  *              from the notifier queue
3403  *
3404  * PARAMETERS : None
3405  *
3406  * RETURN     : int32_t type of status
3407  *              NO_ERROR  -- success
3408  *              non-zero failure code
3409  *==========================================================================*/
3410 int32_t QCameraCbNotifier::flushPreviewNotifications()
3411 {
3412     if (!mActive) {
3413         LOGE("notify thread is not active");
3414         return UNKNOWN_ERROR;
3415     }
3416 
3417     mDataQ.flushNodes(matchPreviewNotifications);
3418 
3419     return NO_ERROR;
3420 }
3421 
3422 /*===========================================================================
3423  * FUNCTION   : startSnapshots
3424  *
3425  * DESCRIPTION: Enables snapshot mode
3426  *
3427  * PARAMETERS : None
3428  *
3429  * RETURN     : int32_t type of status
3430  *              NO_ERROR  -- success
3431  *              non-zero failure code
3432  *==========================================================================*/
3433 int32_t QCameraCbNotifier::startSnapshots()
3434 {
3435     return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
3436 }
3437 
3438 /*===========================================================================
3439  * FUNCTION   : stopSnapshots
3440  *
3441  * DESCRIPTION: Disables snapshot processing mode
3442  *
3443  * PARAMETERS : None
3444  *
3445  * RETURN     : None
3446  *==========================================================================*/
3447 void QCameraCbNotifier::stopSnapshots()
3448 {
3449     mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
3450 }
3451 
3452 }; // namespace qcamera
3453