1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCameraPostProc"
31
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <utils/Errors.h>
37
38 // Camera dependencies
39 #include "QCamera2HWI.h"
40 #include "QCameraPostProc.h"
41 #include "QCameraTrace.h"
42
43 extern "C" {
44 #include "mm_camera_dbg.h"
45 }
46
47 namespace qcamera {
48
49 const char *QCameraPostProcessor::STORE_LOCATION = "/sdcard/img_%d.jpg";
50
51 #define FREE_JPEG_OUTPUT_BUFFER(ptr,cnt) do { \
52 int jpeg_bufs; \
53 for (jpeg_bufs = 0; jpeg_bufs < (int)cnt; jpeg_bufs++) { \
54 if (ptr[jpeg_bufs] != NULL) { \
55 free(ptr[jpeg_bufs]); \
56 ptr[jpeg_bufs] = NULL; \
57 } \
58 } } while (0)
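// Usage note (sketch): 'ptr' is expected to be an array of malloc()'d JPEG
// output buffers (see m_pJpegOutputMem below); the macro is invoked as a
// statement, e.g. FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);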
59
60 /*===========================================================================
61 * FUNCTION : QCameraPostProcessor
62 *
63 * DESCRIPTION: constructor of QCameraPostProcessor.
64 *
65 * PARAMETERS :
66 * @cam_ctrl : ptr to HWI object
67 *
68 * RETURN : None
69 *==========================================================================*/
70 QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
71 : m_parent(cam_ctrl),
72 mJpegCB(NULL),
73 mJpegUserData(NULL),
74 mJpegClientHandle(0),
75 mJpegSessionId(0),
76 m_pJpegExifObj(NULL),
77 m_bThumbnailNeeded(TRUE),
78 mPPChannelCount(0),
79 m_bInited(FALSE),
80 m_inputPPQ(releaseOngoingPPData, this),
81 m_ongoingPPQ(releaseOngoingPPData, this),
82 m_inputJpegQ(releaseJpegData, this),
83 m_ongoingJpegQ(releaseJpegData, this),
84 m_inputRawQ(releaseRawData, this),
85 mSaveFrmCnt(0),
86 mUseSaveProc(false),
87 mUseJpegBurst(false),
88 mJpegMemOpt(true),
89 m_JpegOutputMemCount(0),
90 mNewJpegSessionNeeded(true),
91 m_bufCountPPQ(0),
92 m_PPindex(0)
93 {
94 memset(&mJpegHandle, 0, sizeof(mJpegHandle));
95 memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
96 memset(&m_pJpegOutputMem, 0, sizeof(m_pJpegOutputMem));
97 memset(mPPChannels, 0, sizeof(mPPChannels));
98 m_DataMem = NULL;
99 mOfflineDataBufs = NULL;
100 }
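// Note: the input/ongoing queues above are constructed with release callbacks
// (releaseOngoingPPData, releaseJpegData, releaseRawData) so that any entries
// left in a queue when it is flushed release their associated buffers.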
101
102 /*===========================================================================
103 * FUNCTION : ~QCameraPostProcessor
104 *
105 * DESCRIPTION: destructor of QCameraPostProcessor.
106 *
107 * PARAMETERS : None
108 *
109 * RETURN : None
110 *==========================================================================*/
111 QCameraPostProcessor::~QCameraPostProcessor()
112 {
113 FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem,m_JpegOutputMemCount);
114 if (m_pJpegExifObj != NULL) {
115 delete m_pJpegExifObj;
116 m_pJpegExifObj = NULL;
117 }
118 for (int8_t i = 0; i < mPPChannelCount; i++) {
119 QCameraChannel *pChannel = mPPChannels[i];
120 if ( pChannel != NULL ) {
121 pChannel->stop();
122 delete pChannel;
123 pChannel = NULL;
124 }
125 }
126 mPPChannelCount = 0;
127 }
128
129 /*===========================================================================
130 * FUNCTION : setJpegHandle
131 *
132 * DESCRIPTION: set JPEG client handles
133 *
134 * PARAMETERS :
135 * @pJpegHandle : JPEG ops handle
136 * @pJpegMpoHandle : MPO JPEG ops handle
137 * @clientHandle : JPEG client handle
138 *
139 * RETURN : int32_t type of status
140 * NO_ERROR -- success
141 * non-zero failure code
142 *==========================================================================*/
143 int32_t QCameraPostProcessor::setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
144 mm_jpeg_mpo_ops_t *pJpegMpoHandle, uint32_t clientHandle)
145 {
146 LOGH("E mJpegClientHandle: %d, clientHandle: %d",
147 mJpegClientHandle, clientHandle);
148
149 if(pJpegHandle) {
150 memcpy(&mJpegHandle, pJpegHandle, sizeof(mm_jpeg_ops_t));
151 }
152
153 if(pJpegMpoHandle) {
154 memcpy(&mJpegMpoHandle, pJpegMpoHandle, sizeof(mm_jpeg_mpo_ops_t));
155 }
156 mJpegClientHandle = clientHandle;
157 LOGH("X mJpegClientHandle: %d, clientHandle: %d",
158 mJpegClientHandle, clientHandle);
159 return NO_ERROR;
160 }
161
162 /*===========================================================================
163 * FUNCTION : init
164 *
165 * DESCRIPTION: initialization of postprocessor
166 *
167 * PARAMETERS :
168 * @jpeg_cb : callback to handle jpeg event from mm-camera-interface
169 * @user_data : user data ptr for jpeg callback
170 *
171 * RETURN : int32_t type of status
172 * NO_ERROR -- success
173 * non-zero failure code
174 *==========================================================================*/
175 int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
176 {
177 mJpegCB = jpeg_cb;
178 mJpegUserData = user_data;
179 m_dataProcTh.launch(dataProcessRoutine, this);
180 m_saveProcTh.launch(dataSaveRoutine, this);
181 m_parent->mParameters.setReprocCount();
182 m_bInited = TRUE;
183 return NO_ERROR;
184 }
185
186 /*===========================================================================
187 * FUNCTION : deinit
188 *
189 * DESCRIPTION: de-initialization of postprocessor
190 *
191 * PARAMETERS : None
192 *
193 * RETURN : int32_t type of status
194 * NO_ERROR -- success
195 * non-zero failure code
196 *==========================================================================*/
197 int32_t QCameraPostProcessor::deinit()
198 {
199 if (m_bInited == TRUE) {
200 m_dataProcTh.exit();
201 m_saveProcTh.exit();
202 m_bInited = FALSE;
203 }
204 return NO_ERROR;
205 }
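/* A minimal usage sketch (assumption: driven by QCamera2HardwareInterface,
 * which owns the jpeg ops handles and callbacks; the exact call sites live
 * outside this file). It only illustrates the expected ordering of the entry
 * points defined here; the names hwi, jpegOps, jpegMpoOps, clientHdl,
 * jpegEvtCb and pSnapshotChannel are illustrative:
 *
 *   QCameraPostProcessor postProc(hwi);              // hwi: QCamera2HardwareInterface*
 *   postProc.setJpegHandle(&jpegOps, &jpegMpoOps, clientHdl);
 *   postProc.init(jpegEvtCb, hwi);                   // launches data proc / save threads
 *   postProc.start(pSnapshotChannel);                // creates reprocess channels if needed
 *   // processData()/processRawData() are fed from channel callbacks,
 *   // processJpegEvt() is fed from mm-jpeg-interface
 *   postProc.stop();
 *   postProc.deinit();
 */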
206
207 /*===========================================================================
208 * FUNCTION : start
209 *
210 * DESCRIPTION: start postprocessor. Data process thread and data notify thread
211 * will be launched.
212 *
213 * PARAMETERS :
214 * @pSrcChannel : source channel obj ptr that possibly needs reprocess
215 *
216 * RETURN : int32_t type of status
217 * NO_ERROR -- success
218 * non-zero failure code
219 *
220 * NOTE : if any reprocess is needed, a reprocess channel/stream
221 * will be started.
222 *==========================================================================*/
223 int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
224 {
225 char prop[PROPERTY_VALUE_MAX];
226 int32_t rc = NO_ERROR;
227 QCameraChannel *pInputChannel = pSrcChannel;
228
229 LOGH("E ");
230 if (m_bInited == FALSE) {
231 LOGE("postproc not initialized yet");
232 return UNKNOWN_ERROR;
233 }
234
235 if (m_DataMem != NULL) {
236 m_DataMem->release(m_DataMem);
237 m_DataMem = NULL;
238 }
239
240 if (pInputChannel == NULL) {
241 LOGE("Input Channel for pproc is NULL.");
242 return UNKNOWN_ERROR;
243 }
244
245 if ( m_parent->needReprocess() ) {
246 for (int8_t i = 0; i < mPPChannelCount; i++) {
247 // Delete previous reproc channel
248 QCameraReprocessChannel *pChannel = mPPChannels[i];
249 if (pChannel != NULL) {
250 pChannel->stop();
251 delete pChannel;
252 pChannel = NULL;
253 }
254 }
255 mPPChannelCount = 0;
256
257 m_bufCountPPQ = 0;
258 if (!m_parent->isLongshotEnabled()) {
259 m_parent->mParameters.setReprocCount();
260 }
261
262 if (m_parent->mParameters.getManualCaptureMode() >=
263 CAM_MANUAL_CAPTURE_TYPE_3) {
264 mPPChannelCount = m_parent->mParameters.getReprocCount() - 1;
265 } else {
266 mPPChannelCount = m_parent->mParameters.getReprocCount();
267 }
268
269 // Create all reproc channels and start channel
270 for (int8_t i = 0; i < mPPChannelCount; i++) {
271 mPPChannels[i] = m_parent->addReprocChannel(pInputChannel, i);
272 if (mPPChannels[i] == NULL) {
273 LOGE("cannot add multi reprocess channel i = %d", i);
274 return UNKNOWN_ERROR;
275 }
276 rc = mPPChannels[i]->start();
277 if (rc != 0) {
278 LOGE("cannot start multi reprocess channel i = %d", i);
279 delete mPPChannels[i];
280 mPPChannels[i] = NULL;
281 return UNKNOWN_ERROR;
282 }
283 pInputChannel = static_cast<QCameraChannel *>(mPPChannels[i]);
284 }
285 }
286
287 property_get("persist.camera.longshot.save", prop, "0");
288 mUseSaveProc = atoi(prop) > 0 ? true : false;
289
290 m_PPindex = 0;
291 m_InputMetadata.clear();
292 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
293 m_parent->m_cbNotifier.startSnapshots();
294 LOGH("X rc = %d", rc);
295 return rc;
296 }
297
298 /*===========================================================================
299 * FUNCTION : stop
300 *
301 * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
302 *
303 * PARAMETERS : None
304 *
305 * RETURN : int32_t type of status
306 * NO_ERROR -- success
307 * non-zero failure code
308 *
309 * NOTE : reprocess channel will be stopped and deleted if there is any
310 *==========================================================================*/
311 int32_t QCameraPostProcessor::stop()
312 {
313 if (m_bInited == TRUE) {
314 m_parent->m_cbNotifier.stopSnapshots();
315
316 if (m_DataMem != NULL) {
317 m_DataMem->release(m_DataMem);
318 m_DataMem = NULL;
319 }
320
321 // dataProc thread needs to process "stop" as a sync call because aborting a jpeg job must be synchronous
322 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
323 }
324 // stop reproc channel if exists
325 for (int8_t i = 0; i < mPPChannelCount; i++) {
326 QCameraReprocessChannel *pChannel = mPPChannels[i];
327 if (pChannel != NULL) {
328 pChannel->stop();
329 delete pChannel;
330 pChannel = NULL;
331 }
332 }
333 mPPChannelCount = 0;
334 m_PPindex = 0;
335 m_InputMetadata.clear();
336
337 if (mOfflineDataBufs != NULL) {
338 mOfflineDataBufs->deallocate();
339 delete mOfflineDataBufs;
340 mOfflineDataBufs = NULL;
341 }
342 return NO_ERROR;
343 }
344
345 /*===========================================================================
346 * FUNCTION : createJpegSession
347 *
348 * DESCRIPTION: start the JPEG session in parallel with reprocessing to reduce the KPI
349 *
350 * PARAMETERS :
351 * @pSrcChannel : source channel obj ptr that possibly needs reprocess
352 *
353 * RETURN : int32_t type of status
354 * NO_ERROR -- success
355 * non-zero failure code
356 *==========================================================================*/
357 int32_t QCameraPostProcessor::createJpegSession(QCameraChannel *pSrcChannel)
358 {
359 int32_t rc = NO_ERROR;
360
361 LOGH("E ");
362 if (m_bInited == FALSE) {
363 LOGE("postproc not initialized yet");
364 return UNKNOWN_ERROR;
365 }
366
367 if (pSrcChannel == NULL) {
368 LOGE("Input Channel for pproc is NULL.");
369 return UNKNOWN_ERROR;
370 }
371
372 if (mPPChannelCount > 0) {
373 QCameraChannel *pChannel = NULL;
374 int ppChannel_idx = mPPChannelCount - 1;
375 pChannel = m_parent->needReprocess() ? mPPChannels[ppChannel_idx] :
376 pSrcChannel;
377 QCameraStream *pSnapshotStream = NULL;
378 QCameraStream *pThumbStream = NULL;
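// A dedicated thumbnail stream (preview/postview) is used only when the
// thumbnail is not generated from the main image and, in ZSL mode, only when
// snapshot and preview flip settings match; otherwise getJpegEncodingConfig()
// falls back to scaling the main snapshot stream for the thumbnail.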
379 bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
380 (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
381 m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
382 !m_parent->mParameters.generateThumbFromMain());
383
384 if (pChannel == NULL) {
385 LOGE("Input Channel for pproc is NULL for index %d.",
386 ppChannel_idx);
387 return UNKNOWN_ERROR;
388 }
389
390 for (uint32_t i = 0; i < pChannel->getNumOfStreams(); ++i) {
391 QCameraStream *pStream = pChannel->getStreamByIndex(i);
392
393 if ( NULL == pStream ) {
394 break;
395 }
396
397 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
398 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
399 pSnapshotStream = pStream;
400 }
401
402 if ((thumb_stream_needed) &&
403 (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
404 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
405 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
406 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
407 pThumbStream = pStream;
408 }
409 }
410
411 // If thumbnail is not part of the reprocess channel, then
412 // try to get it from the source channel
413 if ((thumb_stream_needed) && (NULL == pThumbStream) &&
414 (pChannel == mPPChannels[ppChannel_idx])) {
415 for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); ++i) {
416 QCameraStream *pStream = pSrcChannel->getStreamByIndex(i);
417
418 if ( NULL == pStream ) {
419 break;
420 }
421
422 if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
423 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
424 pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
425 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
426 pThumbStream = pStream;
427 }
428 }
429 }
430
431 if ( NULL != pSnapshotStream ) {
432 mm_jpeg_encode_params_t encodeParam;
433 memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
434 rc = getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream);
435 if (rc != NO_ERROR) {
436 LOGE("error getting encoding config");
437 return rc;
438 }
439 LOGH("[KPI Perf] : call jpeg create_session");
440
441 rc = mJpegHandle.create_session(mJpegClientHandle,
442 &encodeParam,
443 &mJpegSessionId);
444 if (rc != NO_ERROR) {
445 LOGE("error creating a new jpeg encoding session");
446 return rc;
447 }
448 mNewJpegSessionNeeded = false;
449 }
450 }
451 LOGH("X ");
452 return rc;
453 }
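// Note: mNewJpegSessionNeeded is cleared above so that the subsequent encode
// path can reuse this pre-created session instead of creating a new one per job.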
454
455 /*===========================================================================
456 * FUNCTION : getJpegEncodingConfig
457 *
458 * DESCRIPTION: function to prepare encoding job information
459 *
460 * PARAMETERS :
461 * @encode_parm : param to be filled with encoding configuration
462 *
463 * RETURN : int32_t type of status
464 * NO_ERROR -- success
465 * non-zero failure code
466 *==========================================================================*/
467 int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
468 QCameraStream *main_stream,
469 QCameraStream *thumb_stream)
470 {
471 LOGD("E");
472 int32_t ret = NO_ERROR;
473 size_t out_size;
474
475 char prop[PROPERTY_VALUE_MAX];
476 property_get("persist.camera.jpeg_burst", prop, "0");
477 mUseJpegBurst = (atoi(prop) > 0) && !mUseSaveProc;
478 encode_parm.burst_mode = mUseJpegBurst;
479
480 cam_rect_t crop;
481 memset(&crop, 0, sizeof(cam_rect_t));
482 main_stream->getCropInfo(crop);
483
484 cam_dimension_t src_dim, dst_dim;
485 memset(&src_dim, 0, sizeof(cam_dimension_t));
486 memset(&dst_dim, 0, sizeof(cam_dimension_t));
487 main_stream->getFrameDimension(src_dim);
488
489 bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
490 if (hdr_output_crop && crop.height) {
491 dst_dim.height = crop.height;
492 } else {
493 dst_dim.height = src_dim.height;
494 }
495 if (hdr_output_crop && crop.width) {
496 dst_dim.width = crop.width;
497 } else {
498 dst_dim.width = src_dim.width;
499 }
500
501 // set rotation only when no online rotation or offline pp rotation is done before
502 if (!m_parent->needRotationReprocess()) {
503 encode_parm.rotation = m_parent->mParameters.getJpegRotation();
504 }
505
506 encode_parm.main_dim.src_dim = src_dim;
507 encode_parm.main_dim.dst_dim = dst_dim;
508
509 m_dst_dim = dst_dim;
510
511 encode_parm.jpeg_cb = mJpegCB;
512 encode_parm.userdata = mJpegUserData;
513
514 m_bThumbnailNeeded = TRUE; // encode thumbnail by default
515 // System property to disable thumbnail encoding in order to reduce power;
516 // thumbnail encoding defaults to TRUE and is skipped only when this
517 // property is explicitly set to 1
518 property_get("persist.camera.tn.disable", prop, "0");
519 if (atoi(prop) == 1) {
520 m_bThumbnailNeeded = FALSE;
521 LOGH("m_bThumbnailNeeded is %d", m_bThumbnailNeeded);
522 }
523 cam_dimension_t thumbnailSize;
524 memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
525 m_parent->getThumbnailSize(thumbnailSize);
526 if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
527 // (0,0) means no thumbnail
528 m_bThumbnailNeeded = FALSE;
529 }
530 encode_parm.encode_thumbnail = m_bThumbnailNeeded;
531
532 // get color format
533 cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
534 main_stream->getFormat(img_fmt);
535 encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
536
537 // get jpeg quality
538 uint32_t val = m_parent->getJpegQuality();
539 if (0U < val) {
540 encode_parm.quality = val;
541 } else {
542 LOGH("Using default JPEG quality");
543 encode_parm.quality = 85;
544 }
545 cam_frame_len_offset_t main_offset;
546 memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
547 main_stream->getFrameOffset(main_offset);
548
549 // src buf config
550 QCameraMemory *pStreamMem = main_stream->getStreamBufs();
551 if (pStreamMem == NULL) {
552 LOGE("cannot get stream bufs from main stream");
553 ret = BAD_VALUE;
554 goto on_error;
555 }
556 encode_parm.num_src_bufs = pStreamMem->getCnt();
557 for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
558 camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
559 if (stream_mem != NULL) {
560 encode_parm.src_main_buf[i].index = i;
561 encode_parm.src_main_buf[i].buf_size = stream_mem->size;
562 encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
563 encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
564 encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
565 encode_parm.src_main_buf[i].offset = main_offset;
566 }
567 }
568 LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
569 "src_dim = %dX%d dst_dim = %dX%d",
570 encode_parm.num_src_bufs,
571 main_offset.mp[0].width, main_offset.mp[0].height,
572 main_offset.frame_len, encode_parm.rotation,
573 src_dim.width, src_dim.height,
574 dst_dim.width, dst_dim.height);
575
576 if (m_bThumbnailNeeded == TRUE) {
577 m_parent->getThumbnailSize(encode_parm.thumb_dim.dst_dim);
578
579 if (thumb_stream == NULL) {
580 thumb_stream = main_stream;
581 }
582 if (((90 == m_parent->mParameters.getJpegRotation())
583 || (270 == m_parent->mParameters.getJpegRotation()))
584 && (m_parent->needRotationReprocess())) {
585 // swap thumbnail dimensions
586 cam_dimension_t tmp_dim = encode_parm.thumb_dim.dst_dim;
587 encode_parm.thumb_dim.dst_dim.width = tmp_dim.height;
588 encode_parm.thumb_dim.dst_dim.height = tmp_dim.width;
589 }
590 pStreamMem = thumb_stream->getStreamBufs();
591 if (pStreamMem == NULL) {
592 LOGE("cannot get stream bufs from thumb stream");
593 ret = BAD_VALUE;
594 goto on_error;
595 }
596 cam_frame_len_offset_t thumb_offset;
597 memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
598 thumb_stream->getFrameOffset(thumb_offset);
599 encode_parm.num_tmb_bufs = pStreamMem->getCnt();
600 for (uint32_t i = 0; i < pStreamMem->getCnt(); i++) {
601 camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
602 if (stream_mem != NULL) {
603 encode_parm.src_thumb_buf[i].index = i;
604 encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
605 encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
606 encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
607 encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
608 encode_parm.src_thumb_buf[i].offset = thumb_offset;
609 }
610 }
611 cam_format_t img_fmt_thumb = CAM_FORMAT_YUV_420_NV12;
612 thumb_stream->getFormat(img_fmt_thumb);
613 encode_parm.thumb_color_format = getColorfmtFromImgFmt(img_fmt_thumb);
614
615 // crop is the same if frame is the same
616 if (thumb_stream != main_stream) {
617 memset(&crop, 0, sizeof(cam_rect_t));
618 thumb_stream->getCropInfo(crop);
619 }
620
621 memset(&src_dim, 0, sizeof(cam_dimension_t));
622 thumb_stream->getFrameDimension(src_dim);
623 encode_parm.thumb_dim.src_dim = src_dim;
624
625 if (!m_parent->needRotationReprocess()) {
626 encode_parm.thumb_rotation = m_parent->mParameters.getJpegRotation();
627 }
628 encode_parm.thumb_dim.crop = crop;
629 encode_parm.thumb_from_postview =
630 !m_parent->mParameters.generateThumbFromMain() &&
631 (img_fmt_thumb != CAM_FORMAT_YUV_420_NV12_UBWC) &&
632 (m_parent->mParameters.useJpegExifRotation() ||
633 m_parent->mParameters.getJpegRotation() == 0);
634 LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
635 "src_dim = %dX%d, dst_dim = %dX%d",
636 encode_parm.num_tmb_bufs,
637 thumb_offset.mp[0].width, thumb_offset.mp[0].height,
638 thumb_offset.frame_len, encode_parm.thumb_rotation,
639 encode_parm.thumb_dim.src_dim.width,
640 encode_parm.thumb_dim.src_dim.height,
641 encode_parm.thumb_dim.dst_dim.width,
642 encode_parm.thumb_dim.dst_dim.height);
643 }
644
645 if (m_parent->mParameters.useJpegExifRotation()){
646 encode_parm.thumb_rotation = m_parent->mParameters.getJpegExifRotation();
647 }
648
649 encode_parm.num_dst_bufs = 1;
650 if (mUseJpegBurst) {
651 encode_parm.num_dst_bufs = MAX_JPEG_BURST;
652 }
653 encode_parm.get_memory = NULL;
654 out_size = main_offset.frame_len;
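// With JPEG memory optimization enabled, each destination buffer only carries
// an omx_jpeg_ouput_buf_t descriptor and the actual output memory is obtained
// on demand via the getJpegMemory/releaseJpegMemory callbacks; otherwise a full
// frame_len-sized output buffer is preallocated per destination buffer.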
655 if (mJpegMemOpt) {
656 encode_parm.get_memory = getJpegMemory;
657 encode_parm.put_memory = releaseJpegMemory;
658 out_size = sizeof(omx_jpeg_ouput_buf_t);
659 encode_parm.num_dst_bufs = encode_parm.num_src_bufs;
660 }
661 m_JpegOutputMemCount = (uint32_t)encode_parm.num_dst_bufs;
662 for (uint32_t i = 0; i < m_JpegOutputMemCount; i++) {
663 if (m_pJpegOutputMem[i] != NULL)
664 free(m_pJpegOutputMem[i]);
665 omx_jpeg_ouput_buf_t omx_out_buf;
666 memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
667 omx_out_buf.handle = this;
668 // allocate output buf for jpeg encoding
669 m_pJpegOutputMem[i] = malloc(out_size);
670
671 if (NULL == m_pJpegOutputMem[i]) {
672 ret = NO_MEMORY;
673 LOGE("initHeapMem for jpeg, ret = NO_MEMORY");
674 goto on_error;
675 }
676
677 if (mJpegMemOpt) {
678 memcpy(m_pJpegOutputMem[i], &omx_out_buf, sizeof(omx_out_buf));
679 }
680
681 encode_parm.dest_buf[i].index = i;
682 encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
683 encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMem[i];
684 encode_parm.dest_buf[i].fd = -1;
685 encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
686 encode_parm.dest_buf[i].offset = main_offset;
687 }
688
689 LOGD("X");
690 return NO_ERROR;
691
692 on_error:
693 FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);
694
695 LOGD("X with error %d", ret);
696 return ret;
697 }
698
699 /*===========================================================================
700 * FUNCTION : sendEvtNotify
701 *
702 * DESCRIPTION: send event notify through notify callback registered by upper layer
703 *
704 * PARAMETERS :
705 * @msg_type: msg type of notify
706 * @ext1 : extension
707 * @ext2 : extension
708 *
709 * RETURN : int32_t type of status
710 * NO_ERROR -- success
711 * non-zero failure code
712 *==========================================================================*/
713 int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
714 int32_t ext1,
715 int32_t ext2)
716 {
717 return m_parent->sendEvtNotify(msg_type, ext1, ext2);
718 }
719
720 /*===========================================================================
721 * FUNCTION : sendDataNotify
722 *
723 * DESCRIPTION: enqueue data into dataNotify thread
724 *
725 * PARAMETERS :
726 * @msg_type: data callback msg type
727 * @data : ptr to data memory struct
728 * @index : index to data buffer
729 * @metadata: ptr to meta data buffer if there is any
730 * @release_data : ptr to struct indicating if data need to be released
731 * after notify
732 * @super_buf_frame_idx : super buffer frame index
733 *
734 * RETURN : int32_t type of status
735 * NO_ERROR -- success
736 * non-zero failure code
737 *==========================================================================*/
738 int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
739 camera_memory_t *data,
740 uint8_t index,
741 camera_frame_metadata_t *metadata,
742 qcamera_release_data_t *release_data,
743 uint32_t super_buf_frame_idx)
744 {
745 qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
746 if (NULL == data_cb) {
747 LOGE("no mem for qcamera_data_argm_t");
748 return NO_MEMORY;
749 }
750 memset(data_cb, 0, sizeof(qcamera_data_argm_t));
751 data_cb->msg_type = msg_type;
752 data_cb->data = data;
753 data_cb->index = index;
754 data_cb->metadata = metadata;
755 if (release_data != NULL) {
756 data_cb->release_data = *release_data;
757 }
758
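// Ownership of data_cb passes to the callback notifier below; releaseNotifyData()
// frees it after the callback completes, or immediately if enqueueing fails.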
759 qcamera_callback_argm_t cbArg;
760 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
761 cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
762 cbArg.msg_type = msg_type;
763 cbArg.data = data;
764 cbArg.metadata = metadata;
765 cbArg.user_data = data_cb;
766 cbArg.cookie = this;
767 cbArg.release_cb = releaseNotifyData;
768 cbArg.frame_index = super_buf_frame_idx;
769 int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
770 if ( NO_ERROR != rc ) {
771 LOGE("Error enqueuing jpeg data into notify queue");
772 releaseNotifyData(data_cb, this, UNKNOWN_ERROR);
773 return UNKNOWN_ERROR;
774 }
775
776 return rc;
777 }
778
779 /*===========================================================================
780 * FUNCTION : validatePostProcess
781 *
782 * DESCRIPTION: Verify output buffer count of pp module
783 *
784 * PARAMETERS :
785 * @frame : process frame received from mm-camera-interface
786 *
787 * RETURN : bool type of status
788 * TRUE -- success
789 * FALSE -- failure
790 *==========================================================================*/
791 bool QCameraPostProcessor::validatePostProcess(mm_camera_super_buf_t *frame)
792 {
793 bool status = TRUE;
794 QCameraChannel *pChannel = NULL;
795 QCameraReprocessChannel *m_pReprocChannel = NULL;
796
797 if (frame == NULL) {
798 return status;
799 }
800
801 pChannel = m_parent->getChannelByHandle(frame->ch_id);
802 for (int8_t i = 0; i < mPPChannelCount; i++) {
803 if (pChannel == mPPChannels[i]->getSrcChannel()) {
804 m_pReprocChannel = mPPChannels[i];
805 break;
806 }
807 }
808
809 if ((m_pReprocChannel != NULL) && (pChannel == m_pReprocChannel->getSrcChannel())) {
810 QCameraStream *pStream = NULL;
811 for (uint8_t i = 0; i < m_pReprocChannel->getNumOfStreams(); i++) {
812 pStream = m_pReprocChannel->getStreamByIndex(i);
813 if (pStream && (m_inputPPQ.getCurrentSize() > 0) &&
814 (pStream->getNumQueuedBuf() <= 0)) {
815 LOGW("Out of PP Buffer PPQ = %d ongoingQ = %d Jpeg = %d onJpeg = %d",
816 m_inputPPQ.getCurrentSize(), m_ongoingPPQ.getCurrentSize(),
817 m_inputJpegQ.getCurrentSize(), m_ongoingJpegQ.getCurrentSize());
818 status = FALSE;
819 break;
820 }
821 }
822 }
823 return status;
824 }
825
826 /*===========================================================================
827 * FUNCTION : getOfflinePPInputBuffer
828 *
829 * DESCRIPTION: Function to generate offline post proc buffer
830 *
831 * PARAMETERS :
832 * @src_frame : process frame received from mm-camera-interface
833 *
834 * RETURN : Buffer pointer if successful
835 * : NULL in case of failures
836 *==========================================================================*/
837 mm_camera_buf_def_t *QCameraPostProcessor::getOfflinePPInputBuffer(
838 mm_camera_super_buf_t *src_frame)
839 {
840 mm_camera_buf_def_t *mBufDefs = NULL;
841 QCameraChannel *pChannel = NULL;
842 QCameraStream *src_pStream = NULL;
843 mm_camera_buf_def_t *data_frame = NULL;
844 mm_camera_buf_def_t *meta_frame = NULL;
845
846 if (mOfflineDataBufs == NULL) {
847 LOGE("Offline Buffer not allocated");
848 return NULL;
849 }
850
851 uint32_t num_bufs = mOfflineDataBufs->getCnt();
852 size_t bufDefsSize = num_bufs * sizeof(mm_camera_buf_def_t);
853 mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
854 if (mBufDefs == NULL) {
855 LOGE("No memory");
856 return NULL;
857 }
858 memset(mBufDefs, 0, bufDefsSize);
859
860 pChannel = m_parent->getChannelByHandle(src_frame->ch_id);
861 for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
862 src_pStream = pChannel->getStreamByHandle(
863 src_frame->bufs[i]->stream_id);
864 if (src_pStream != NULL) {
865 if (src_pStream->getMyType() == CAM_STREAM_TYPE_RAW) {
866 LOGH("Found RAW input stream");
867 data_frame = src_frame->bufs[i];
868 } else if (src_pStream->getMyType() == CAM_STREAM_TYPE_METADATA){
869 LOGH("Found Metadata input stream");
870 meta_frame = src_frame->bufs[i];
871 }
872 }
873 }
874
875 if ((src_pStream != NULL) && (data_frame != NULL)) {
876 cam_frame_len_offset_t offset;
877 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
878 src_pStream->getFrameOffset(offset);
879 for (uint32_t i = 0; i < num_bufs; i++) {
880 mBufDefs[i] = *data_frame;
881 mOfflineDataBufs->getBufDef(offset, mBufDefs[i], i);
882
883 LOGD("Dumping RAW data on offline buffer");
884 /*Actual data memcpy just for verification*/
885 memcpy(mBufDefs[i].buffer, data_frame->buffer,
886 mBufDefs[i].frame_len);
887 }
888 releaseSuperBuf(src_frame, CAM_STREAM_TYPE_RAW);
889 } else {
890 free(mBufDefs);
891 mBufDefs = NULL;
892 }
893
894 LOGH("mBufDefs = %p", mBufDefs);
895 return mBufDefs;
896 }
897
898 /*===========================================================================
899 * FUNCTION : processData
900 *
901 * DESCRIPTION: enqueue data into dataProc thread
902 *
903 * PARAMETERS :
904 * @frame : process frame received from mm-camera-interface
905 *
906 * RETURN : int32_t type of status
907 * NO_ERROR -- success
908 * non-zero failure code
909 *
910 * NOTE : depending on whether offline reprocess is needed, the received frame
911 * will be sent to either the postprocess input queue or jpeg encoding
912 *==========================================================================*/
913 int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
914 {
915 if (m_bInited == FALSE) {
916 LOGE("postproc not initialized yet");
917 return UNKNOWN_ERROR;
918 }
919
920 if (frame == NULL) {
921 LOGE("Invalid parameter");
922 return UNKNOWN_ERROR;
923 }
924
925 mm_camera_buf_def_t *meta_frame = NULL;
926 for (uint32_t i = 0; i < frame->num_bufs; i++) {
927 // look through input superbuf
928 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
929 meta_frame = frame->bufs[i];
930 break;
931 }
932 }
933 if (meta_frame != NULL) {
934 // Update metadata for frame-based parameters
935 m_parent->updateMetadata((metadata_buffer_t *)meta_frame->buffer);
936 }
937
938 if (m_parent->needReprocess()) {
939 if ((!m_parent->isLongshotEnabled() &&
940 !m_parent->m_stateMachine.isNonZSLCaptureRunning()) ||
941 (m_parent->isLongshotEnabled() &&
942 m_parent->isCaptureShutterEnabled())) {
943 //play shutter sound
944 m_parent->playShutter();
945 }
946
947 ATRACE_INT("Camera:Reprocess", 1);
948 LOGH("need reprocess");
949
950 // enqueue to post proc input queue
951 qcamera_pp_data_t *pp_request_job =
952 (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
953 if (pp_request_job == NULL) {
954 LOGE("No memory for pproc job");
955 return NO_MEMORY;
956 }
957 memset(pp_request_job, 0, sizeof(qcamera_pp_data_t));
958 pp_request_job->src_frame = frame;
959 pp_request_job->src_reproc_frame = frame;
960 pp_request_job->reprocCount = 0;
961 pp_request_job->ppChannelIndex = 0;
962
963 if ((NULL != frame) &&
964 (0 < frame->num_bufs)
965 && (m_parent->isRegularCapture())) {
966 /*Regular capture. Source stream will be deleted*/
967 mm_camera_buf_def_t *bufs = NULL;
968 uint32_t num_bufs = frame->num_bufs;
969 bufs = new mm_camera_buf_def_t[num_bufs];
970 if (NULL == bufs) {
971 LOGE("Unable to allocate cached buffers");
972 return NO_MEMORY;
973 }
974
975 for (uint32_t i = 0; i < num_bufs; i++) {
976 bufs[i] = *frame->bufs[i];
977 frame->bufs[i] = &bufs[i];
978 }
979 pp_request_job->src_reproc_bufs = bufs;
980
981 // Don't release source frame after encoding
982 // at this point the source channel will not exist.
983 pp_request_job->reproc_frame_release = true;
984 }
985
986 if (mOfflineDataBufs != NULL) {
987 pp_request_job->offline_reproc_buf =
988 getOfflinePPInputBuffer(frame);
989 if (pp_request_job->offline_reproc_buf != NULL) {
990 pp_request_job->offline_buffer = true;
991 }
992 }
993
994 if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
995 LOGW("Input PP Q is not active!!!");
996 releaseSuperBuf(frame);
997 free(frame);
998 free(pp_request_job);
999 frame = NULL;
1000 pp_request_job = NULL;
1001 return NO_ERROR;
1002 }
1003 if (m_parent->mParameters.isAdvCamFeaturesEnabled()
1004 && (meta_frame != NULL)) {
1005 m_InputMetadata.add(meta_frame);
1006 }
1007 } else if (m_parent->mParameters.isNV16PictureFormat() ||
1008 m_parent->mParameters.isNV21PictureFormat()) {
1009 //check if raw frame information is needed.
1010 if(m_parent->mParameters.isYUVFrameInfoNeeded())
1011 setYUVFrameInfo(frame);
1012
1013 processRawData(frame);
1014 } else {
1015 //play shutter sound
1016 if(!m_parent->m_stateMachine.isNonZSLCaptureRunning() &&
1017 !m_parent->mLongshotEnabled)
1018 m_parent->playShutter();
1019
1020 LOGH("no need offline reprocess, sending to jpeg encoding");
1021 qcamera_jpeg_data_t *jpeg_job =
1022 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
1023 if (jpeg_job == NULL) {
1024 LOGE("No memory for jpeg job");
1025 return NO_MEMORY;
1026 }
1027
1028 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
1029 jpeg_job->src_frame = frame;
1030
1031 if (meta_frame != NULL) {
1032 // fill in meta data frame ptr
1033 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
1034 }
1035
1036 // enqueue to jpeg input queue
1037 if (!m_inputJpegQ.enqueue((void *)jpeg_job)) {
1038 LOGW("Input Jpeg Q is not active!!!");
1039 releaseJpegJobData(jpeg_job);
1040 free(jpeg_job);
1041 jpeg_job = NULL;
1042 return NO_ERROR;
1043 }
1044 }
1045
1046 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1047 return NO_ERROR;
1048 }
1049
1050 /*===========================================================================
1051 * FUNCTION : processRawData
1052 *
1053 * DESCRIPTION: enqueue raw data into dataProc thread
1054 *
1055 * PARAMETERS :
1056 * @frame : process frame received from mm-camera-interface
1057 *
1058 * RETURN : int32_t type of status
1059 * NO_ERROR -- success
1060 * non-zero failure code
1061 *==========================================================================*/
1062 int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
1063 {
1064 if (m_bInited == FALSE) {
1065 LOGE("postproc not initialized yet");
1066 return UNKNOWN_ERROR;
1067 }
1068
1069 // enqueue to raw input queue
1070 if (m_inputRawQ.enqueue((void *)frame)) {
1071 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1072 } else {
1073 LOGW("m_inputRawQ is not active!!!");
1074 releaseSuperBuf(frame);
1075 free(frame);
1076 frame = NULL;
1077 }
1078 return NO_ERROR;
1079 }
1080
1081 /*===========================================================================
1082 * FUNCTION : processJpegEvt
1083 *
1084 * DESCRIPTION: process jpeg event from mm-jpeg-interface.
1085 *
1086 * PARAMETERS :
1087 * @evt : payload of jpeg event, including information about jpeg encoding
1088 * status, jpeg size and so on.
1089 *
1090 * RETURN : int32_t type of status
1091 * NO_ERROR -- success
1092 * non-zero failure code
1093 *
1094 * NOTE : This event will also trigger DataProc thread to move to next job
1095 * processing (i.e., send a new jpeg encoding job to mm-jpeg-interface
1096 * if there is any pending job in jpeg input queue)
1097 *==========================================================================*/
1098 int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
1099 {
1100 if (m_bInited == FALSE) {
1101 LOGE("postproc not initialized yet");
1102 return UNKNOWN_ERROR;
1103 }
1104
1105 int32_t rc = NO_ERROR;
1106 camera_memory_t *jpeg_mem = NULL;
1107 omx_jpeg_ouput_buf_t *jpeg_out = NULL;
1108 void *jpegData = NULL;
1109 if (mUseSaveProc && m_parent->isLongshotEnabled()) {
1110 qcamera_jpeg_evt_payload_t *saveData = ( qcamera_jpeg_evt_payload_t * ) malloc(sizeof(qcamera_jpeg_evt_payload_t));
1111 if ( NULL == saveData ) {
1112 LOGE("Can not allocate save data message!");
1113 return NO_MEMORY;
1114 }
1115 *saveData = *evt;
1116 if (m_inputSaveQ.enqueue((void *) saveData)) {
1117 m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1118 } else {
1119 LOGD("m_inputSaveQ PP Q is not active!!!");
1120 free(saveData);
1121 saveData = NULL;
1122 return rc;
1123 }
1124 } else {
1125 /* To be removed later when ISP Frame sync feature is available
1126 qcamera_jpeg_data_t *jpeg_job =
1127 (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue(matchJobId,
1128 (void*)&evt->jobId);
1129 uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;*/
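// NOTE: hardcoded placeholder frame index; the dequeue-based lookup in the
// comment above stays disabled until the ISP frame sync feature is available.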
1130 uint32_t frame_idx = 75;
1131 LOGH("FRAME INDEX %d", frame_idx);
1132 // Release jpeg job data
1133 m_ongoingJpegQ.flushNodes(matchJobId, (void*)&evt->jobId);
1134
1135 if (m_inputPPQ.getCurrentSize() > 0) {
1136 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1137 }
1138 LOGH("[KPI Perf] : jpeg job %d", evt->jobId);
1139
1140 if ((false == m_parent->m_bIntJpegEvtPending) &&
1141 (m_parent->mDataCb == NULL ||
1142 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 )) {
1143 LOGW("No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled");
1144 rc = NO_ERROR;
1145 goto end;
1146 }
1147
1148 if(evt->status == JPEG_JOB_STATUS_ERROR) {
1149 LOGE("Error event handled from jpeg, status = %d",
1150 evt->status);
1151 rc = FAILED_TRANSACTION;
1152 goto end;
1153 }
1154 if (!mJpegMemOpt) {
1155 jpegData = evt->out_data.buf_vaddr;
1156 }
1157 else {
1158 jpeg_out = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
1159 if (jpeg_out != NULL) {
1160 jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
1161 if (jpeg_mem != NULL) {
1162 jpegData = jpeg_mem->data;
1163 }
1164 }
1165 }
1166 m_parent->dumpJpegToFile(jpegData,
1167 evt->out_data.buf_filled_len,
1168 evt->jobId);
1169 LOGH("Dump jpeg_size=%d", evt->out_data.buf_filled_len);
1170 if(true == m_parent->m_bIntJpegEvtPending) {
1171 //Sending JPEG snapshot taken notification to HAL
1172 pthread_mutex_lock(&m_parent->m_int_lock);
1173 pthread_cond_signal(&m_parent->m_int_cond);
1174 pthread_mutex_unlock(&m_parent->m_int_lock);
1175 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1176 return rc;
1177 }
1178 if (!mJpegMemOpt) {
1179 // alloc jpeg memory to pass to upper layer
1180 jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len,
1181 1, m_parent->mCallbackCookie);
1182 if (NULL == jpeg_mem) {
1183 rc = NO_MEMORY;
1184 LOGE("getMemory for jpeg, ret = NO_MEMORY");
1185 goto end;
1186 }
1187 memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
1188 }
1189 LOGH("Calling upperlayer callback to store JPEG image");
1190 qcamera_release_data_t release_data;
1191 memset(&release_data, 0, sizeof(qcamera_release_data_t));
1192 release_data.data = jpeg_mem;
1193 LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
1194 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
1195 jpeg_mem,
1196 0,
1197 NULL,
1198 &release_data,
1199 frame_idx);
1200 m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
1201
1202 end:
1203 if (rc != NO_ERROR) {
1204 // send error msg to upper layer
1205 LOGE("Jpeg Encoding failed. Notify Application");
1206 sendEvtNotify(CAMERA_MSG_ERROR,
1207 UNKNOWN_ERROR,
1208 0);
1209
1210 if (NULL != jpeg_mem) {
1211 jpeg_mem->release(jpeg_mem);
1212 jpeg_mem = NULL;
1213 }
1214 }
1215
1216 /* check whether to send callback for depth map */
1217 if (m_parent->mParameters.isUbiRefocus() &&
1218 (m_parent->getOutputImageCount() + 1 ==
1219 m_parent->mParameters.getRefocusOutputCount())) {
1220 m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
1221
1222 jpeg_mem = m_DataMem;
1223 release_data.data = jpeg_mem;
1224 m_DataMem = NULL;
1225 LOGH("[KPI Perf]: send jpeg callback for depthmap ");
1226 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
1227 jpeg_mem,
1228 0,
1229 NULL,
1230 &release_data,
1231 frame_idx);
1232 if (rc != NO_ERROR) {
1233 // send error msg to upper layer
1234 sendEvtNotify(CAMERA_MSG_ERROR,
1235 UNKNOWN_ERROR,
1236 0);
1237 if (NULL != jpeg_mem) {
1238 jpeg_mem->release(jpeg_mem);
1239 jpeg_mem = NULL;
1240 }
1241 }
1242 m_DataMem = NULL;
1243 }
1244 }
1245
1246 // wake up data proc thread to do the next job,
1247 // if previous request is blocked due to ongoing jpeg job
1248 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1249
1250 return rc;
1251 }
1252
1253 /*===========================================================================
1254 * FUNCTION : processPPData
1255 *
1256 * DESCRIPTION: process received frame after reprocess.
1257 *
1258 * PARAMETERS :
1259 * @frame : received frame from reprocess channel.
1260 *
1261 * RETURN : int32_t type of status
1262 * NO_ERROR -- success
1263 * non-zero failure code
1264 *
1265 * NOTE : The frame after reprocess needs to be sent to jpeg encoding.
1266 *==========================================================================*/
1267 int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
1268 {
1269 bool triggerEvent = TRUE;
1270
1271 LOGD("QCameraPostProcessor::processPPData");
1272 bool needSuperBufMatch = m_parent->mParameters.generateThumbFromMain();
1273 if (m_bInited == FALSE) {
1274 LOGE("postproc not initialized yet");
1275 return UNKNOWN_ERROR;
1276 }
1277
1278 qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
1279 if (NULL == job) {
1280 LOGE("Cannot find reprocess job");
1281 return BAD_VALUE;
1282 }
1283
1284 if (!needSuperBufMatch && (job->src_frame == NULL
1285 || job->src_reproc_frame == NULL) ) {
1286 LOGE("Invalid reprocess job");
1287 return BAD_VALUE;
1288 }
1289
1290 if (!needSuperBufMatch && (m_parent->mParameters.isNV16PictureFormat() ||
1291 m_parent->mParameters.isNV21PictureFormat())) {
1292 releaseOngoingPPData(job, this);
1293 free(job);
1294
1295 if(m_parent->mParameters.isYUVFrameInfoNeeded())
1296 setYUVFrameInfo(frame);
1297 return processRawData(frame);
1298 }
1299 #ifdef TARGET_TS_MAKEUP
1300 // find snapshot frame
1301 mm_camera_buf_def_t *pReprocFrame = NULL;
1302 QCameraStream * pSnapshotStream = NULL;
1303 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1304 if (pChannel == NULL) {
1305 for (int8_t i = 0; i < mPPChannelCount; i++) {
1306 if ((mPPChannels[i] != NULL) &&
1307 (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
1308 pChannel = mPPChannels[i];
1309 break;
1310 }
1311 }
1312 }
1313 if (pChannel == NULL) {
1314 LOGE("No corresponding channel (ch_id = %d) exist, return here",
1315 frame->ch_id);
1316 return BAD_VALUE;
1317 }
1318
1319 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1320 pSnapshotStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
1321 if (pSnapshotStream != NULL) {
1322 if (pSnapshotStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
1323 pReprocFrame = frame->bufs[i];
1324 break;
1325 }
1326 }
1327 }
1328 if (pReprocFrame != NULL && m_parent->mParameters.isFaceDetectionEnabled()) {
1329 m_parent->TsMakeupProcess_Snapshot(pReprocFrame,pSnapshotStream);
1330 } else {
1331 LOGH("pReprocFrame == NULL || isFaceDetectionEnabled = %d",
1332 m_parent->mParameters.isFaceDetectionEnabled());
1333 }
1334 #endif
1335 if ((m_parent->isLongshotEnabled())
1336 && (!m_parent->isCaptureShutterEnabled())
1337 && (!m_parent->mCACDoneReceived)) {
1338 // play shutter sound for longshot
1339 // after reprocess is done
1340 m_parent->playShutter();
1341 }
1342 m_parent->mCACDoneReceived = FALSE;
1343
1344 int8_t mCurReprocCount = job->reprocCount;
1345 int8_t mCurChannelIndex = job->ppChannelIndex;
1346 if ( mCurReprocCount > 1 ) {
1347 //In case of pp 2nd pass, we can release input of 2nd pass
1348 releaseSuperBuf(job->src_frame);
1349 free(job->src_frame);
1350 job->src_frame = NULL;
1351 }
1352
1353 LOGD("mCurReprocCount = %d mCurChannelIndex = %d mTotalNumReproc = %d",
1354 mCurReprocCount, mCurChannelIndex,
1355 m_parent->mParameters.getReprocCount());
1356 if (mCurReprocCount < m_parent->mParameters.getReprocCount()) {
1357 //More pp pass needed. Push frame back to pp queue.
1358 qcamera_pp_data_t *pp_request_job = job;
1359 pp_request_job->src_frame = frame;
1360
1361 if ((mPPChannels[mCurChannelIndex]->getReprocCount()
1362 == mCurReprocCount) &&
1363 (mPPChannels[mCurChannelIndex + 1] != NULL)) {
1364 pp_request_job->ppChannelIndex++;
1365 }
1366
1367 // enqueue to post proc input queue
1368 if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
1369 LOGW("m_input PP Q is not active!!!");
1370 releaseOngoingPPData(pp_request_job,this);
1371 free(pp_request_job);
1372 pp_request_job = NULL;
1373 triggerEvent = FALSE;
1374 }
1375 } else {
1376 //Done with post processing. Send frame to Jpeg
1377 qcamera_jpeg_data_t *jpeg_job =
1378 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
1379 if (jpeg_job == NULL) {
1380 LOGE("No memory for jpeg job");
1381 return NO_MEMORY;
1382 }
1383
1384 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
1385 jpeg_job->src_frame = frame;
1386 jpeg_job->src_reproc_frame = job ? job->src_reproc_frame : NULL;
1387 jpeg_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
1388 jpeg_job->reproc_frame_release = job ? job->reproc_frame_release : false;
1389 jpeg_job->offline_reproc_buf = job ? job->offline_reproc_buf : NULL;
1390 jpeg_job->offline_buffer = job ? job->offline_buffer : false;
1391
1392 // find meta data frame
1393 mm_camera_buf_def_t *meta_frame = NULL;
1394 if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
1395 size_t meta_idx = m_parent->mParameters.getExifBufIndex(m_PPindex);
1396 if (m_InputMetadata.size() >= (meta_idx + 1)) {
1397 meta_frame = m_InputMetadata.itemAt(meta_idx);
1398 } else {
1399 LOGW("Input metadata vector contains %d entries, index required %d",
1400 m_InputMetadata.size(), meta_idx);
1401 }
1402 m_PPindex++;
1403 } else {
1404 for (uint32_t i = 0; job && job->src_reproc_frame &&
1405 (i < job->src_reproc_frame->num_bufs); i++) {
1406 // look through input superbuf
1407 if (job->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1408 meta_frame = job->src_reproc_frame->bufs[i];
1409 break;
1410 }
1411 }
1412
1413 if (meta_frame == NULL) {
1414 // look through reprocess superbuf
1415 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1416 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1417 meta_frame = frame->bufs[i];
1418 break;
1419 }
1420 }
1421 }
1422 }
1423 if (meta_frame != NULL) {
1424 // fill in meta data frame ptr
1425 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
1426 }
1427
1428 // enqueue reprocessed frame to jpeg input queue
1429 if (false == m_inputJpegQ.enqueue((void *)jpeg_job)) {
1430 LOGW("Input Jpeg Q is not active!!!");
1431 releaseJpegJobData(jpeg_job);
1432 free(jpeg_job);
1433 jpeg_job = NULL;
1434 triggerEvent = FALSE;
1435 }
1436
1437 // free pp job buf
1438 if (job) {
1439 free(job);
1440 }
1441 }
1442
1443 LOGD("");
1444 // wake up data proc thread
1445
1446 if (triggerEvent) {
1447 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1448 }
1449
1450 return NO_ERROR;
1451 }
1452
1453 /*===========================================================================
1454 * FUNCTION : findJpegJobByJobId
1455 *
1456 * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
1457 *
1458 * PARAMETERS :
1459 * @jobId : job Id of the job
1460 *
1461 * RETURN : ptr to a jpeg job struct. NULL if not found.
1462 *
1463 * NOTE : Currently only one job is sent to mm-jpeg-interface for jpeg
1464 * encoding. Therefore simply dequeue from the ongoing Jpeg Queue
1465 * will serve the purpose to find the jpeg job.
1466 *==========================================================================*/
1467 qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
1468 {
1469 qcamera_jpeg_data_t * job = NULL;
1470 if (jobId == 0) {
1471 LOGE("not a valid jpeg jobId");
1472 return NULL;
1473 }
1474
1475 // currently only one jpeg job ongoing, so simply dequeue the head
1476 job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
1477 return job;
1478 }
1479
1480 /*===========================================================================
1481 * FUNCTION : releasePPInputData
1482 *
1483 * DESCRIPTION: callback function to release post process input data node
1484 *
1485 * PARAMETERS :
1486 * @data : ptr to post process input data
1487 * @user_data : user data ptr (QCameraPostProcessor)
1488 *
1489 * RETURN : None
1490 *==========================================================================*/
1491 void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
1492 {
1493 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1494 if (NULL != pme) {
1495 qcamera_pp_request_t *pp_job = (qcamera_pp_request_t *)data;
1496 if (NULL != pp_job->src_frame) {
1497 pme->releaseSuperBuf(pp_job->src_frame);
1498 if (pp_job->src_frame == pp_job->src_reproc_frame)
1499 pp_job->src_reproc_frame = NULL;
1500 free(pp_job->src_frame);
1501 pp_job->src_frame = NULL;
1502 }
1503 if (NULL != pp_job->src_reproc_frame) {
1504 pme->releaseSuperBuf(pp_job->src_reproc_frame);
1505 free(pp_job->src_reproc_frame);
1506 pp_job->src_reproc_frame = NULL;
1507 }
1508 pp_job->reprocCount = 0;
1509 }
1510 }
1511
1512 /*===========================================================================
1513 * FUNCTION : releaseJpegData
1514 *
1515 * DESCRIPTION: callback function to release jpeg job node
1516 *
1517 * PARAMETERS :
1518 * @data : ptr to ongoing jpeg job data
1519 * @user_data : user data ptr (QCameraPostProcessor)
1520 *
1521 * RETURN : None
1522 *==========================================================================*/
1523 void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
1524 {
1525 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1526 if (NULL != pme) {
1527 pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
1528 LOGH("Released job ID %u",
1529 ((qcamera_jpeg_data_t *)data)->jobId);
1530 }
1531 }
1532
1533 /*===========================================================================
1534 * FUNCTION : releaseOngoingPPData
1535 *
1536 * DESCRIPTION: callback function to release ongoing postprocess job node
1537 *
1538 * PARAMETERS :
1539 * @data : ptr to ongoing postprocess job
1540 * @user_data : user data ptr (QCameraPostProcessor)
1541 *
1542 * RETURN : None
1543 *==========================================================================*/
1544 void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
1545 {
1546 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1547 if (NULL != pme) {
1548 qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
1549 if (NULL != pp_job->src_frame) {
1550 if (!pp_job->reproc_frame_release) {
1551 pme->releaseSuperBuf(pp_job->src_frame);
1552 }
1553 if (pp_job->src_frame == pp_job->src_reproc_frame)
1554 pp_job->src_reproc_frame = NULL;
1555
1556 free(pp_job->src_frame);
1557 pp_job->src_frame = NULL;
1558 }
1559 if (NULL != pp_job->src_reproc_frame) {
1560 pme->releaseSuperBuf(pp_job->src_reproc_frame);
1561 free(pp_job->src_reproc_frame);
1562 pp_job->src_reproc_frame = NULL;
1563 }
1564 if ((pp_job->offline_reproc_buf != NULL)
1565 && (pp_job->offline_buffer)) {
1566 free(pp_job->offline_reproc_buf);
1567 pp_job->offline_buffer = false;
1568 }
1569 pp_job->reprocCount = 0;
1570 }
1571 }
1572
1573 /*===========================================================================
1574 * FUNCTION : releaseNotifyData
1575 *
1576 * DESCRIPTION: function to release internal resources in notify data struct
1577 *
1578 * PARAMETERS :
1579 * @user_data : ptr user data
1580 * @cookie : callback cookie
1581 * @cb_status : callback status
1582 *
1583 * RETURN : None
1584 *
1585 * NOTE : deallocate jpeg heap memory if it's not NULL
1586 *==========================================================================*/
1587 void QCameraPostProcessor::releaseNotifyData(void *user_data,
1588 void *cookie,
1589 int32_t cb_status)
1590 {
1591 LOGD("releaseNotifyData release_data %p", user_data);
1592
1593 qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
1594 QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
1595 if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
1596
1597 if ( postProc->mUseSaveProc &&
1598 app_cb->release_data.unlinkFile &&
1599 ( NO_ERROR != cb_status ) ) {
1600
1601 String8 unlinkPath((const char *) app_cb->release_data.data->data,
1602 app_cb->release_data.data->size);
1603 int rc = unlink(unlinkPath.string());
1604 LOGH("Unlinking stored file rc = %d",
1605 rc);
1606 }
1607
1608 if (app_cb && NULL != app_cb->release_data.data) {
1609 app_cb->release_data.data->release(app_cb->release_data.data);
1610 app_cb->release_data.data = NULL;
1611 }
1612 if (app_cb && NULL != app_cb->release_data.frame) {
1613 postProc->releaseSuperBuf(app_cb->release_data.frame);
1614 free(app_cb->release_data.frame);
1615 app_cb->release_data.frame = NULL;
1616 }
1617 if (app_cb && NULL != app_cb->release_data.streamBufs) {
1618 app_cb->release_data.streamBufs->deallocate();
1619 delete app_cb->release_data.streamBufs;
1620 app_cb->release_data.streamBufs = NULL;
1621 }
1622 free(app_cb);
1623 }
1624 }
1625
1626 /*===========================================================================
1627 * FUNCTION : releaseSuperBuf
1628 *
1629 * DESCRIPTION: function to release a superbuf frame by returning back to kernel
1630 *
1631 * PARAMETERS :
1632 * @super_buf : ptr to the superbuf frame
1633 *
1634 * RETURN : None
1635 *==========================================================================*/
1636 void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
1637 {
1638 QCameraChannel *pChannel = NULL;
1639
1640 if (NULL != super_buf) {
1641 pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
1642
1643 if ( NULL == pChannel ) {
1644 for (int8_t i = 0; i < mPPChannelCount; i++) {
1645 if ((mPPChannels[i] != NULL) &&
1646 (mPPChannels[i]->getMyHandle() == super_buf->ch_id)) {
1647 pChannel = mPPChannels[i];
1648 break;
1649 }
1650 }
1651 }
1652
1653 if (pChannel != NULL) {
1654 pChannel->bufDone(super_buf);
1655 } else {
1656 LOGE("Channel id %d not found!!",
1657 super_buf->ch_id);
1658 }
1659 }
1660 }
1661
1662 /*===========================================================================
1663 * FUNCTION : releaseSuperBuf
1664 *
1665 * DESCRIPTION : function to release a superbuf frame by returning back to kernel
1666 *
1667 * PARAMETERS :
1668 * @super_buf : ptr to the superbuf frame
1669 * @stream_type: Type of stream to be released
1670 *
1671 * RETURN : None
1672 *==========================================================================*/
1673 void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf,
1674 cam_stream_type_t stream_type)
1675 {
1676 QCameraChannel *pChannel = NULL;
1677
1678 if (NULL != super_buf) {
1679 pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
1680 if (pChannel == NULL) {
1681 for (int8_t i = 0; i < mPPChannelCount; i++) {
1682 if ((mPPChannels[i] != NULL) &&
1683 (mPPChannels[i]->getMyHandle() == super_buf->ch_id)) {
1684 pChannel = mPPChannels[i];
1685 break;
1686 }
1687 }
1688 }
1689
1690 if (pChannel != NULL) {
1691 for (uint32_t i = 0; i < super_buf->num_bufs; i++) {
1692 if (super_buf->bufs[i] != NULL) {
1693 QCameraStream *pStream =
1694 pChannel->getStreamByHandle(super_buf->bufs[i]->stream_id);
1695 if ((pStream != NULL) && ((pStream->getMyType() == stream_type)
1696 || (pStream->getMyOriginalType() == stream_type))) {
1697 pChannel->bufDone(super_buf, super_buf->bufs[i]->stream_id);
1698 break;
1699 }
1700 }
1701 }
1702 } else {
1703 LOGE("Channel id %d not found!!",
1704 super_buf->ch_id);
1705 }
1706 }
1707 }
1708
1709 /*===========================================================================
1710 * FUNCTION : releaseJpegJobData
1711 *
1712 * DESCRIPTION: function to release internal resources in jpeg job struct
1713 *
1714 * PARAMETERS :
1715 * @job : ptr to jpeg job struct
1716 *
1717 * RETURN : None
1718 *
1719 * NOTE : original source frame need to be queued back to kernel for
1720 * future use. Output buf of jpeg job need to be released since
1721 * it's allocated for each job. Exif object need to be deleted.
1722 *==========================================================================*/
1723 void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
1724 {
1725 LOGD("E");
1726 if (NULL != job) {
1727 if (NULL != job->src_reproc_frame) {
1728 if (!job->reproc_frame_release) {
1729 releaseSuperBuf(job->src_reproc_frame);
1730 }
1731 free(job->src_reproc_frame);
1732 job->src_reproc_frame = NULL;
1733 }
1734
1735 if (NULL != job->src_frame) {
1736 releaseSuperBuf(job->src_frame);
1737 free(job->src_frame);
1738 job->src_frame = NULL;
1739 }
1740
1741 if (NULL != job->pJpegExifObj) {
1742 delete job->pJpegExifObj;
1743 job->pJpegExifObj = NULL;
1744 }
1745
1746 if (NULL != job->src_reproc_bufs) {
1747 delete [] job->src_reproc_bufs;
1748 }
1749
1750 if ((job->offline_reproc_buf != NULL)
1751 && (job->offline_buffer)) {
1752 free(job->offline_reproc_buf);
1753 job->offline_buffer = false;
1754 }
1755 }
1756 LOGD("X");
1757 }
1758
1759 /*===========================================================================
1760 * FUNCTION : releaseSaveJobData
1761 *
1762 * DESCRIPTION: function to release internal resources in store jobs
1763 *
1764 * PARAMETERS :
1765 * @job : ptr to save job struct
1766 *
1767 * RETURN : None
1768 *
1769 *==========================================================================*/
1770 void QCameraPostProcessor::releaseSaveJobData(void *data, void *user_data)
1771 {
1772 LOGD("E");
1773
1774 QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
1775 if (NULL == pme) {
1776 LOGE("Invalid postproc handle");
1777 return;
1778 }
1779
1780 qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) data;
1781 if (job_data == NULL) {
1782 LOGE("Invalid jpeg event data");
1783 return;
1784 }
1785
1786 // find job by jobId
1787 qcamera_jpeg_data_t *job = pme->findJpegJobByJobId(job_data->jobId);
1788
1789 if (NULL != job) {
1790 pme->releaseJpegJobData(job);
1791 free(job);
1792 } else {
1793 LOGE("Invalid jpeg job");
1794 }
1795
1796 LOGD("X");
1797 }
1798
1799 /*===========================================================================
1800 * FUNCTION : releaseRawData
1801 *
1802 * DESCRIPTION: function to release internal resources in store jobs
1803 *
1804 * PARAMETERS :
1805 * @job : ptr to save job struct
1806 *
1807 * RETURN : None
1808 *
1809 *==========================================================================*/
1810 void QCameraPostProcessor::releaseRawData(void *data, void *user_data)
1811 {
1812 LOGD("E");
1813
1814 QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
1815 if (NULL == pme) {
1816 LOGE("Invalid postproc handle");
1817 return;
1818 }
1819 mm_camera_super_buf_t *super_buf = (mm_camera_super_buf_t *) data;
1820 pme->releaseSuperBuf(super_buf);
1821
1822 LOGD("X");
1823 }
1824
1825
1826 /*===========================================================================
1827 * FUNCTION : getColorfmtFromImgFmt
1828 *
1829 * DESCRIPTION: function to return jpeg color format based on its image format
1830 *
1831 * PARAMETERS :
1832 * @img_fmt : image format
1833 *
1834 * RETURN : jpeg color format that can be understood by the omx lib
1835 *==========================================================================*/
1836 mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
1837 {
1838 switch (img_fmt) {
1839 case CAM_FORMAT_YUV_420_NV21:
1840 case CAM_FORMAT_YUV_420_NV21_VENUS:
1841 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
1842 case CAM_FORMAT_YUV_420_NV21_ADRENO:
1843 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
1844 case CAM_FORMAT_YUV_420_NV12:
1845 case CAM_FORMAT_YUV_420_NV12_VENUS:
1846 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
1847 case CAM_FORMAT_YUV_420_YV12:
1848 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
1849 case CAM_FORMAT_YUV_422_NV61:
1850 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
1851 case CAM_FORMAT_YUV_422_NV16:
1852 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
1853 default:
1854 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
1855 }
1856 }
1857
1858 /*===========================================================================
1859 * FUNCTION : getJpegImgTypeFromImgFmt
1860 *
1861 * DESCRIPTION: function to return jpeg encode image type based on its image format
1862 *
1863 * PARAMETERS :
1864 * @img_fmt : image format
1865 *
1866 * RETURN : return jpeg source image format (YUV or Bitstream)
1867 *==========================================================================*/
1868 mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
1869 {
1870 switch (img_fmt) {
1871 case CAM_FORMAT_YUV_420_NV21:
1872 case CAM_FORMAT_YUV_420_NV21_ADRENO:
1873 case CAM_FORMAT_YUV_420_NV12:
1874 case CAM_FORMAT_YUV_420_NV12_VENUS:
1875 case CAM_FORMAT_YUV_420_NV21_VENUS:
1876 case CAM_FORMAT_YUV_420_YV12:
1877 case CAM_FORMAT_YUV_422_NV61:
1878 case CAM_FORMAT_YUV_422_NV16:
1879 return MM_JPEG_FMT_YUV;
1880 default:
1881 return MM_JPEG_FMT_YUV;
1882 }
1883 }
1884
1885 /*===========================================================================
1886 * FUNCTION : queryStreams
1887 *
1888 * DESCRIPTION: utility method for retrieving main, thumbnail and reprocess
1889 * streams and frame from bundled super buffer
1890 *
1891 * PARAMETERS :
1892 * @main : ptr to main stream if present
1893 * @thumb : ptr to thumbnail stream if present
1894 * @reproc : ptr to reprocess stream if present
1895 * @main_image : ptr to main image if present
1896 * @thumb_image: ptr to thumbnail image if present
1897 * @frame : bundled super buffer
1898 * @reproc_frame : bundled source frame buffer
1899 *
1900 * RETURN : int32_t type of status
1901 * NO_ERROR -- success
1902 * non-zero failure code
1903 *==========================================================================*/
1904 int32_t QCameraPostProcessor::queryStreams(QCameraStream **main,
1905 QCameraStream **thumb,
1906 QCameraStream **reproc,
1907 mm_camera_buf_def_t **main_image,
1908 mm_camera_buf_def_t **thumb_image,
1909 mm_camera_super_buf_t *frame,
1910 mm_camera_super_buf_t *reproc_frame)
1911 {
1912 if (NULL == frame) {
1913 return NO_INIT;
1914 }
1915
1916 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1917 // check reprocess channel if not found
1918 if (pChannel == NULL) {
1919 for (int8_t i = 0; i < mPPChannelCount; i++) {
1920 if ((mPPChannels[i] != NULL) &&
1921 (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
1922 pChannel = mPPChannels[i];
1923 break;
1924 }
1925 }
1926 }
1927 if (pChannel == NULL) {
1928 LOGD("No corresponding channel (ch_id = %d) exist, return here",
1929 frame->ch_id);
1930 return BAD_VALUE;
1931 }
1932
1933 // Use snapshot stream to create thumbnail if snapshot and preview
1934 // flip settings don't match in ZSL mode.
1935 bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
1936 (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
1937 m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
1938 !m_parent->mParameters.generateThumbFromMain());
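    // When thumb_stream_needed is false (mismatched flips in ZSL, or the
    // thumbnail is generated from the main image), no preview/postview
    // stream is picked below and the thumbnail is later scaled from the
    // main snapshot frame instead.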
1939
1940 *main = *thumb = *reproc = NULL;
1941 *main_image = *thumb_image = NULL;
1942 // find snapshot frame and thumbnail frame
1943 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1944 QCameraStream *pStream =
1945 pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
1946 if (pStream != NULL) {
1947 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
1948 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
1949 pStream->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
1950 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO) ||
1951 (m_parent->mParameters.getofflineRAW() &&
1952 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))) {
1953 *main = pStream;
1954 *main_image = frame->bufs[i];
1955 } else if (thumb_stream_needed &&
1956 (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
1957 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
1958 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
1959 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
1960 *thumb = pStream;
1961 *thumb_image = frame->bufs[i];
1962 }
1963 if (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC) ) {
1964 *reproc = pStream;
1965 }
1966 }
1967 }
1968
1969 if (thumb_stream_needed && *thumb_image == NULL && reproc_frame != NULL) {
1970 QCameraChannel *pSrcReprocChannel = NULL;
1971 pSrcReprocChannel = m_parent->getChannelByHandle(reproc_frame->ch_id);
1972 if (pSrcReprocChannel != NULL) {
1973 // find thumbnail frame
1974 for (uint32_t i = 0; i < reproc_frame->num_bufs; i++) {
1975 QCameraStream *pStream =
1976 pSrcReprocChannel->getStreamByHandle(
1977 reproc_frame->bufs[i]->stream_id);
1978 if (pStream != NULL) {
1979 if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
1980 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
1981 *thumb = pStream;
1982 *thumb_image = reproc_frame->bufs[i];
1983 }
1984 }
1985 }
1986 }
1987 }
1988
1989 return NO_ERROR;
1990 }
1991
1992 /*===========================================================================
1993 * FUNCTION : syncStreamParams
1994 *
1995 * DESCRIPTION: Query the runtime parameters of all streams included
1996 * in the main and reprocessed frames
1997 *
1998 * PARAMETERS :
1999 * @frame : Main image super buffer
2000 * @reproc_frame : Image super buffer that got processed
2001 *
2002 * RETURN : int32_t type of status
2003 * NO_ERROR -- success
2004 * non-zero failure code
2005 *==========================================================================*/
2006 int32_t QCameraPostProcessor::syncStreamParams(mm_camera_super_buf_t *frame,
2007 mm_camera_super_buf_t *reproc_frame)
2008 {
2009 QCameraStream *reproc_stream = NULL;
2010 QCameraStream *main_stream = NULL;
2011 QCameraStream *thumb_stream = NULL;
2012 mm_camera_buf_def_t *main_frame = NULL;
2013 mm_camera_buf_def_t *thumb_frame = NULL;
2014 int32_t ret = NO_ERROR;
2015
2016 ret = queryStreams(&main_stream,
2017 &thumb_stream,
2018 &reproc_stream,
2019 &main_frame,
2020 &thumb_frame,
2021 frame,
2022 reproc_frame);
2023 if (NO_ERROR != ret) {
2024 LOGE("Camera streams query from input frames failed %d",
2025 ret);
2026 return ret;
2027 }
2028
2029 if (NULL != main_stream) {
2030 ret = main_stream->syncRuntimeParams();
2031 if (NO_ERROR != ret) {
2032 LOGE("Syncing of main stream runtime parameters failed %d",
2033 ret);
2034 return ret;
2035 }
2036 }
2037
2038 if (NULL != thumb_stream) {
2039 ret = thumb_stream->syncRuntimeParams();
2040 if (NO_ERROR != ret) {
2041 LOGE("Syncing of thumb stream runtime parameters failed %d",
2042 ret);
2043 return ret;
2044 }
2045 }
2046
2047 if ((NULL != reproc_stream) && (reproc_stream != main_stream)) {
2048 ret = reproc_stream->syncRuntimeParams();
2049 if (NO_ERROR != ret) {
2050 LOGE("Syncing of reproc stream runtime parameters failed %d",
2051 ret);
2052 return ret;
2053 }
2054 }
2055
2056 return ret;
2057 }
2058
2059 /*===========================================================================
2060 * FUNCTION : encodeData
2061 *
2062 * DESCRIPTION: function to prepare encoding job information and send to
2063 * mm-jpeg-interface to do the encoding job
2064 *
2065 * PARAMETERS :
2066 * @jpeg_job_data : ptr to a struct saving job related information
2067 * @needNewSess : flag to indicate if a new jpeg encoding session needs
2068 * to be created. After creation, this flag will be toggled
2069 *
2070 * RETURN : int32_t type of status
2071 * NO_ERROR -- success
2072 * non-zero failure code
2073 *==========================================================================*/
2074 int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
2075 uint8_t &needNewSess)
2076 {
2077 LOGD("E");
2078 int32_t ret = NO_ERROR;
2079 mm_jpeg_job_t jpg_job;
2080 uint32_t jobId = 0;
2081 QCameraStream *reproc_stream = NULL;
2082 QCameraStream *main_stream = NULL;
2083 mm_camera_buf_def_t *main_frame = NULL;
2084 QCameraStream *thumb_stream = NULL;
2085 mm_camera_buf_def_t *thumb_frame = NULL;
2086 mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
2087 cam_rect_t crop;
2088 cam_stream_parm_buffer_t param;
2089 cam_stream_img_prop_t imgProp;
2090
2091 // find channel
2092 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
2093 // check reprocess channel if not found
2094 if (pChannel == NULL) {
2095 for (int8_t i = 0; i < mPPChannelCount; i++) {
2096 if ((mPPChannels[i] != NULL) &&
2097 (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
2098 pChannel = mPPChannels[i];
2099 break;
2100 }
2101 }
2102 }
2103
2104 if (pChannel == NULL) {
2105 LOGE("No corresponding channel (ch_id = %d) exist, return here",
2106 recvd_frame->ch_id);
2107 return BAD_VALUE;
2108 }
2109
2110 const uint32_t jpeg_rotation = m_parent->mParameters.getJpegRotation();
2111
2112 ret = queryStreams(&main_stream,
2113 &thumb_stream,
2114 &reproc_stream,
2115 &main_frame,
2116 &thumb_frame,
2117 recvd_frame,
2118 jpeg_job_data->src_reproc_frame);
2119 if (NO_ERROR != ret) {
2120 return ret;
2121 }
2122
2123 if(NULL == main_frame){
2124 LOGE("Main frame is NULL");
2125 return BAD_VALUE;
2126 }
2127
2128 if(NULL == thumb_frame){
2129 LOGD("Thumbnail frame does not exist");
2130 }
2131
2132 QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
2133 if (NULL == memObj) {
2134 LOGE("Memeory Obj of main frame is NULL");
2135 return NO_MEMORY;
2136 }
2137
2138 // dump snapshot frame if enabled
2139 m_parent->dumpFrameToFile(main_stream, main_frame,
2140 QCAMERA_DUMP_FRM_SNAPSHOT, (char *)"CPP");
2141
2142 // send upperlayer callback for raw image
2143 camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
2144 if (NULL != m_parent->mDataCb &&
2145 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
2146 qcamera_callback_argm_t cbArg;
2147 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2148 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
2149 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
2150 cbArg.data = mem;
2151 cbArg.index = 0;
2152 m_parent->m_cbNotifier.notifyCallback(cbArg);
2153 }
2154 if (NULL != m_parent->mNotifyCb &&
2155 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
2156 qcamera_callback_argm_t cbArg;
2157 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2158 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
2159 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
2160 cbArg.ext1 = 0;
2161 cbArg.ext2 = 0;
2162 m_parent->m_cbNotifier.notifyCallback(cbArg);
2163 }
2164
2165 if (mJpegClientHandle <= 0) {
2166 LOGE("Error: bug here, mJpegClientHandle is 0");
2167 return UNKNOWN_ERROR;
2168 }
2169
2170 if (needNewSess) {
2171 // create jpeg encoding session
2172 mm_jpeg_encode_params_t encodeParam;
2173 memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
2174 ret = getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
2175 if (ret != NO_ERROR) {
2176 LOGE("error getting encoding config");
2177 return ret;
2178 }
2179 LOGH("[KPI Perf] : call jpeg create_session");
2180 ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
2181 if (ret != NO_ERROR) {
2182 LOGE("error creating a new jpeg encoding session");
2183 return ret;
2184 }
2185 needNewSess = FALSE;
2186 }
2187 // Fill in new job
2188 memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
2189 jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
2190 jpg_job.encode_job.session_id = mJpegSessionId;
2191 jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
2192 jpg_job.encode_job.dst_index = 0;
2193
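    // Destination buffer index selection: with JPEG memory optimization the
    // destination tracks the source buffer index, while in burst mode -1 is
    // passed (presumably letting the JPEG interface manage its own output
    // buffer rotation).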
2194 if (mJpegMemOpt) {
2195 jpg_job.encode_job.dst_index = jpg_job.encode_job.src_index;
2196 } else if (mUseJpegBurst) {
2197 jpg_job.encode_job.dst_index = -1;
2198 }
2199
2200 // use src to reproc frame as work buffer; if src buf is not available
2201 // jpeg interface will allocate work buffer
2202 if (jpeg_job_data->src_reproc_frame != NULL) {
2203 int32_t ret = NO_ERROR;
2204 QCameraStream *main_stream = NULL;
2205 mm_camera_buf_def_t *main_frame = NULL;
2206 QCameraStream *thumb_stream = NULL;
2207 mm_camera_buf_def_t *thumb_frame = NULL;
2208 QCameraStream *reproc_stream = NULL;
2209 mm_camera_buf_def_t *workBuf = NULL;
2210 // Call queryStreams to fetch source of reproc frame
2211 ret = queryStreams(&main_stream,
2212 &thumb_stream,
2213 &reproc_stream,
2214 &main_frame,
2215 &thumb_frame,
2216 jpeg_job_data->src_reproc_frame,
2217 NULL);
2218
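    // The source reprocess buffer is offered as a work buffer only outside
    // low power mode; if none is provided, the JPEG interface allocates a
    // work buffer itself (see comment above).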
2219 if ((NO_ERROR == ret) && ((workBuf = main_frame) != NULL)
2220 && !m_parent->isLowPowerMode()) {
2221 camera_memory_t *camWorkMem = NULL;
2222 int workBufIndex = workBuf->buf_idx;
2223 QCameraMemory *workMem = (QCameraMemory *)workBuf->mem_info;
2224 if (workMem != NULL) {
2225 camWorkMem = workMem->getMemory(workBufIndex, false);
2226 }
2227 if (camWorkMem != NULL && workMem != NULL) {
2228 jpg_job.encode_job.work_buf.buf_size = camWorkMem->size;
2229 jpg_job.encode_job.work_buf.buf_vaddr = (uint8_t *)camWorkMem->data;
2230 jpg_job.encode_job.work_buf.fd = workMem->getFd(workBufIndex);
2231 workMem->invalidateCache(workBufIndex);
2232 }
2233 }
2234 }
2235
2236 cam_dimension_t src_dim;
2237 memset(&src_dim, 0, sizeof(cam_dimension_t));
2238 main_stream->getFrameDimension(src_dim);
2239
2240 bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
2241 bool img_feature_enabled =
2242 m_parent->mParameters.isUbiFocusEnabled() ||
2243 m_parent->mParameters.isUbiRefocus() ||
2244 m_parent->mParameters.isChromaFlashEnabled() ||
2245 m_parent->mParameters.isOptiZoomEnabled() ||
2246 m_parent->mParameters.isStillMoreEnabled();
2247
2248 LOGH("Crop needed %d", img_feature_enabled);
2249 crop.left = 0;
2250 crop.top = 0;
2251 crop.height = src_dim.height;
2252 crop.width = src_dim.width;
2253
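    // If the stream reports an output crop matching this stream's server ID,
    // it overrides the default full-frame crop set above.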
2254 param = main_stream->getOutputCrop();
2255 for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
2256 if (param.outputCrop.crop_info[i].stream_id
2257 == main_stream->getMyServerID()) {
2258 crop = param.outputCrop.crop_info[i].crop;
2259 main_stream->setCropInfo(crop);
2260 }
2261 }
2262 if (img_feature_enabled) {
2263 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2264
2265 param = main_stream->getImgProp();
2266 imgProp = param.imgProp;
2267 main_stream->setCropInfo(imgProp.crop);
2268 crop = imgProp.crop;
2269 thumb_stream = NULL; /* use thumbnail from main image */
2270
2271 if ((reproc_stream != NULL) && (m_DataMem == NULL) &&
2272 m_parent->mParameters.isUbiRefocus()) {
2273
2274 QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
2275 cam_misc_buf_t* refocusResult =
2276 reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
2277 uint32_t resultSize = refocusResult->header_size +
2278 refocusResult->width * refocusResult->height;
2279 camera_memory_t *dataMem = m_parent->mGetMemory(-1, resultSize,
2280 1, m_parent->mCallbackCookie);
2281
2282 LOGH("Refocus result header %u dims %dx%d",
2283 resultSize, refocusResult->width, refocusResult->height);
2284
2285 if (dataMem && dataMem->data) {
2286 memcpy(dataMem->data, refocusResult->data, resultSize);
2287 //save mem pointer for depth map
2288 m_DataMem = dataMem;
2289 }
2290 }
2291 } else if ((reproc_stream != NULL) && (m_parent->mParameters.isTruePortraitEnabled())) {
2292
2293 QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
2294 cam_misc_buf_t* tpResult =
2295 reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
2296 uint32_t tpMetaSize = tpResult->header_size + tpResult->width * tpResult->height;
2297
2298 LOGH("True portrait result header %d% dims dx%d",
2299 tpMetaSize, tpResult->width, tpResult->height);
2300
2301 CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"tp", "bm", -1, "y",
2302 &tpResult->data, tpMetaSize);
2303 }
2304
2305 cam_dimension_t dst_dim;
2306
2307 if (hdr_output_crop && crop.height) {
2308 dst_dim.height = crop.height;
2309 } else {
2310 dst_dim.height = src_dim.height;
2311 }
2312 if (hdr_output_crop && crop.width) {
2313 dst_dim.width = crop.width;
2314 } else {
2315 dst_dim.width = src_dim.width;
2316 }
2317
2318 // main dim
2319 jpg_job.encode_job.main_dim.src_dim = src_dim;
2320 jpg_job.encode_job.main_dim.dst_dim = dst_dim;
2321 jpg_job.encode_job.main_dim.crop = crop;
2322
2323 // get 3a sw version info
2324 cam_q3a_version_t sw_version =
2325 m_parent->getCamHalCapabilities()->q3a_version;
2326
2327 // get exif data
2328 QCameraExif *pJpegExifObj = m_parent->getExifData();
2329 jpeg_job_data->pJpegExifObj = pJpegExifObj;
2330 if (pJpegExifObj != NULL) {
2331 jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
2332 jpg_job.encode_job.exif_info.numOfEntries =
2333 pJpegExifObj->getNumOfEntries();
2334 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
2335 sw_version.major_version;
2336 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
2337 sw_version.minor_version;
2338 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
2339 sw_version.patch_version;
2340 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
2341 sw_version.new_feature_des;
2342 }
2343
2344 // set rotation only when no online rotation or offline pp rotation is done before
2345 if (!m_parent->needRotationReprocess()) {
2346 jpg_job.encode_job.rotation = jpeg_rotation;
2347 }
2348 LOGH("jpeg rotation is set to %d", jpg_job.encode_job.rotation);
2349
2350 // thumbnail dim
2351 if (m_bThumbnailNeeded == TRUE) {
2352 m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
2353
2354 if (thumb_stream == NULL) {
2355 // need jpeg thumbnail, but no postview/preview stream exists
2356 // we use the main stream/frame to encode thumbnail
2357 thumb_stream = main_stream;
2358 thumb_frame = main_frame;
2359 }
2360 if (m_parent->needRotationReprocess() &&
2361 ((90 == jpeg_rotation) || (270 == jpeg_rotation))) {
2362 // swap thumbnail dimensions
2363 cam_dimension_t tmp_dim = jpg_job.encode_job.thumb_dim.dst_dim;
2364 jpg_job.encode_job.thumb_dim.dst_dim.width = tmp_dim.height;
2365 jpg_job.encode_job.thumb_dim.dst_dim.height = tmp_dim.width;
2366 }
2367
2368 memset(&src_dim, 0, sizeof(cam_dimension_t));
2369 thumb_stream->getFrameDimension(src_dim);
2370 jpg_job.encode_job.thumb_dim.src_dim = src_dim;
2371
2372 // crop is the same if frame is the same
2373 if (thumb_frame != main_frame) {
2374 crop.left = 0;
2375 crop.top = 0;
2376 crop.height = src_dim.height;
2377 crop.width = src_dim.width;
2378
2379 param = thumb_stream->getOutputCrop();
2380 for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
2381 if (param.outputCrop.crop_info[i].stream_id
2382 == thumb_stream->getMyServerID()) {
2383 crop = param.outputCrop.crop_info[i].crop;
2384 thumb_stream->setCropInfo(crop);
2385 }
2386 }
2387 }
2388
2389
2390 jpg_job.encode_job.thumb_dim.crop = crop;
2391 if (thumb_frame != NULL) {
2392 jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
2393 }
2394 LOGI("Thumbnail idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
2395 jpg_job.encode_job.thumb_index,
2396 jpg_job.encode_job.thumb_dim.src_dim.width,
2397 jpg_job.encode_job.thumb_dim.src_dim.height,
2398 jpg_job.encode_job.thumb_dim.dst_dim.width,
2399 jpg_job.encode_job.thumb_dim.dst_dim.height);
2400 }
2401
2402 LOGI("Main image idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
2403 jpg_job.encode_job.src_index,
2404 jpg_job.encode_job.main_dim.src_dim.width,
2405 jpg_job.encode_job.main_dim.src_dim.height,
2406 jpg_job.encode_job.main_dim.dst_dim.width,
2407 jpg_job.encode_job.main_dim.dst_dim.height);
2408
2409 if (thumb_frame != NULL) {
2410 // dump thumbnail frame if enabled
2411 m_parent->dumpFrameToFile(thumb_stream, thumb_frame, QCAMERA_DUMP_FRM_THUMBNAIL);
2412 }
2413
2414 if (jpeg_job_data->metadata != NULL) {
2415 // fill in meta data frame ptr
2416 jpg_job.encode_job.p_metadata = jpeg_job_data->metadata;
2417 }
2418
2419 jpg_job.encode_job.hal_version = CAM_HAL_V1;
2420 m_parent->mExifParams.sensor_params.sens_type = m_parent->getSensorType();
2421 jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
2422 jpg_job.encode_job.cam_exif_params.debug_params =
2423 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
2424 if (!jpg_job.encode_job.cam_exif_params.debug_params) {
2425 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
2426 return NO_MEMORY;
2427 }
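    // This heap copy of the 3A debug exif params is freed right after
    // start_job() near the end of this function.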
2428
2429 jpg_job.encode_job.mobicat_mask = m_parent->mParameters.getMobicatMask();
2430
2431
2432 if (NULL != jpg_job.encode_job.p_metadata && (jpg_job.encode_job.mobicat_mask > 0)) {
2433
2434 if (m_parent->mExifParams.debug_params) {
2435 memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
2436 m_parent->mExifParams.debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
2437
2438 /* Save a copy of mobicat params */
2439 jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
2440 jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
2441
2442 if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
2443 jpg_job.encode_job.p_metadata->mobicat_aec_params =
2444 jpg_job.encode_job.cam_exif_params.cam_3a_params;
2445 }
2446
2447 /* Save a copy of 3A debug params */
2448 jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
2449 jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
2450 jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
2451 jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
2452 jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
2453 jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
2454 jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
2455 jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
2456 jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
2457 jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
2458
2459 if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
2460 jpg_job.encode_job.p_metadata->statsdebug_ae_data =
2461 jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
2462 }
2463 if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
2464 jpg_job.encode_job.p_metadata->statsdebug_awb_data =
2465 jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
2466 }
2467 if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
2468 jpg_job.encode_job.p_metadata->statsdebug_af_data =
2469 jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
2470 }
2471 if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
2472 jpg_job.encode_job.p_metadata->statsdebug_asd_data =
2473 jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
2474 }
2475 if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
2476 jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
2477 jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
2478 }
2479 }
2480
2481 }
2482
2483 /* Init the QTable */
2484 for (int i = 0; i < QTABLE_MAX; i++) {
2485 jpg_job.encode_job.qtable_set[i] = 0;
2486 }
2487
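    // Choose between a plain JPEG and an MPO (multi-picture object) container:
    // with related-sensor sync enabled and MPO composition requested, the main
    // camera encodes the primary image and the aux camera the auxiliary one,
    // forming a two-image MPO.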
2488 const cam_sync_related_sensors_event_info_t* related_cam_info =
2489 m_parent->getRelatedCamSyncInfo();
2490 if (related_cam_info->sync_control == CAM_SYNC_RELATED_SENSORS_ON &&
2491 m_parent->getMpoComposition()) {
2492 jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_MPO;
2493 if (related_cam_info->type == CAM_TYPE_MAIN ) {
2494 jpg_job.encode_job.multi_image_info.is_primary = TRUE;
2495 LOGD("Encoding MPO Primary JPEG");
2496 } else {
2497 jpg_job.encode_job.multi_image_info.is_primary = FALSE;
2498 LOGD("Encoding MPO Aux JPEG");
2499 }
2500 jpg_job.encode_job.multi_image_info.num_of_images = 2;
2501 } else {
2502 LOGD("Encoding Single JPEG");
2503 jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
2504 jpg_job.encode_job.multi_image_info.is_primary = FALSE;
2505 jpg_job.encode_job.multi_image_info.num_of_images = 1;
2506 }
2507
2508 LOGI("[KPI Perf] : PROFILE_JPEG_JOB_START");
2509 ret = mJpegHandle.start_job(&jpg_job, &jobId);
2510 if (jpg_job.encode_job.cam_exif_params.debug_params) {
2511 free(jpg_job.encode_job.cam_exif_params.debug_params);
2512 }
2513 if (ret == NO_ERROR) {
2514 // remember job info
2515 jpeg_job_data->jobId = jobId;
2516 }
2517
2518 return ret;
2519 }
2520
2521 /*===========================================================================
2522 * FUNCTION : processRawImageImpl
2523 *
2524 * DESCRIPTION: function to send raw image to upper layer
2525 *
2526 * PARAMETERS :
2527 * @recvd_frame : frame to be encoded
2528 *
2529 * RETURN : int32_t type of status
2530 * NO_ERROR -- success
2531 * non-zero failure code
2532 *==========================================================================*/
2533 int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
2534 {
2535 int32_t rc = NO_ERROR;
2536
2537 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
2538 QCameraStream *pStream = NULL;
2539 mm_camera_buf_def_t *frame = NULL;
2540 // check reprocess channel if not found
2541 if (pChannel == NULL) {
2542 for (int8_t i = 0; i < mPPChannelCount; i++) {
2543 if ((mPPChannels[i] != NULL) &&
2544 (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
2545 pChannel = mPPChannels[i];
2546 break;
2547 }
2548 }
2549 }
2550 if (pChannel == NULL) {
2551 LOGE("No corresponding channel (ch_id = %d) exist, return here",
2552 recvd_frame->ch_id);
2553 return BAD_VALUE;
2554 }
2555
2556 // find snapshot frame
2557 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
2558 QCameraStream *pCurStream =
2559 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
2560 if (pCurStream != NULL) {
2561 if (pCurStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2562 pCurStream->isTypeOf(CAM_STREAM_TYPE_RAW) ||
2563 pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2564 pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
2565 pStream = pCurStream;
2566 frame = recvd_frame->bufs[i];
2567 break;
2568 }
2569 }
2570 }
2571
2572 if ( NULL == frame ) {
2573 LOGE("No valid raw buffer");
2574 return BAD_VALUE;
2575 }
2576
2577 QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
2578 bool zslChannelUsed = m_parent->isZSLMode() &&
2579 ( pChannel != mPPChannels[0] );
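    // In ZSL mode (non-reprocess channel) the existing stream buffer memory is
    // handed up directly; otherwise the raw frame is copied below into freshly
    // allocated callback memory.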
2580 camera_memory_t *raw_mem = NULL;
2581
2582 if (rawMemObj != NULL) {
2583 if (zslChannelUsed) {
2584 raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
2585 } else {
2586 raw_mem = m_parent->mGetMemory(-1,
2587 frame->frame_len,
2588 1,
2589 m_parent->mCallbackCookie);
2590 if (NULL == raw_mem) {
2591 LOGE("Not enough memory for RAW cb ");
2592 return NO_MEMORY;
2593 }
2594 memcpy(raw_mem->data, frame->buffer, frame->frame_len);
2595 }
2596 }
2597
2598 if (NULL != rawMemObj && NULL != raw_mem) {
2599 // dump frame into file
2600 if (frame->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
2601 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
2602 // for YUV422 NV16 case
2603 m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_SNAPSHOT);
2604 } else {
2605 //Received RAW snapshot taken notification
2606 m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_RAW);
2607
2608 if(true == m_parent->m_bIntRawEvtPending) {
2609 //Sending RAW snapshot taken notification to HAL
2610 memset(&m_dst_dim, 0, sizeof(m_dst_dim));
2611 pStream->getFrameDimension(m_dst_dim);
2612 pthread_mutex_lock(&m_parent->m_int_lock);
2613 pthread_cond_signal(&m_parent->m_int_cond);
2614 pthread_mutex_unlock(&m_parent->m_int_lock);
2615 raw_mem->release(raw_mem);
2616 return rc;
2617 }
2618 }
2619
2620 // send data callback / notify for RAW_IMAGE
2621 if (NULL != m_parent->mDataCb &&
2622 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
2623 qcamera_callback_argm_t cbArg;
2624 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2625 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
2626 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
2627 cbArg.data = raw_mem;
2628 cbArg.index = 0;
2629 m_parent->m_cbNotifier.notifyCallback(cbArg);
2630 }
2631 if (NULL != m_parent->mNotifyCb &&
2632 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
2633 qcamera_callback_argm_t cbArg;
2634 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2635 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
2636 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
2637 cbArg.ext1 = 0;
2638 cbArg.ext2 = 0;
2639 m_parent->m_cbNotifier.notifyCallback(cbArg);
2640 }
2641
2642 if ((m_parent->mDataCb != NULL) &&
2643 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
2644 qcamera_release_data_t release_data;
2645 memset(&release_data, 0, sizeof(qcamera_release_data_t));
2646 if ( zslChannelUsed ) {
2647 release_data.frame = recvd_frame;
2648 } else {
2649 release_data.data = raw_mem;
2650 }
2651 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
2652 raw_mem,
2653 0,
2654 NULL,
2655 &release_data);
2656 } else {
2657 raw_mem->release(raw_mem);
2658 }
2659 } else {
2660 LOGE("Cannot get raw mem");
2661 rc = UNKNOWN_ERROR;
2662 }
2663
2664 return rc;
2665 }
2666
2667 /*===========================================================================
2668 * FUNCTION : dataSaveRoutine
2669 *
2670 * DESCRIPTION: data saving routine
2671 *
2672 * PARAMETERS :
2673 * @data : user data ptr (QCameraPostProcessor)
2674 *
2675 * RETURN : None
2676 *==========================================================================*/
2677 void *QCameraPostProcessor::dataSaveRoutine(void *data)
2678 {
2679 int running = 1;
2680 int ret;
2681 uint8_t is_active = FALSE;
2682 QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
2683 QCameraCmdThread *cmdThread = &pme->m_saveProcTh;
2684 cmdThread->setName("CAM_JpegSave");
2685 char saveName[PROPERTY_VALUE_MAX];
2686
2687 LOGH("E");
2688 do {
2689 do {
2690 ret = cam_sem_wait(&cmdThread->cmd_sem);
2691 if (ret != 0 && errno != EINVAL) {
2692 LOGE("cam_sem_wait error (%s)",
2693 strerror(errno));
2694 return NULL;
2695 }
2696 } while (ret != 0);
2697
2698 // we got notified about new cmd avail in cmd queue
2699 camera_cmd_type_t cmd = cmdThread->getCmd();
2700 switch (cmd) {
2701 case CAMERA_CMD_TYPE_START_DATA_PROC:
2702 LOGH("start data proc");
2703 is_active = TRUE;
2704 pme->m_inputSaveQ.init();
2705 break;
2706 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
2707 {
2708 LOGH("stop data proc");
2709 is_active = FALSE;
2710
2711 // flush input save Queue
2712 pme->m_inputSaveQ.flush();
2713
2714 // signal cmd is completed
2715 cam_sem_post(&cmdThread->sync_sem);
2716 }
2717 break;
2718 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
2719 {
2720 LOGH("Do next job, active is %d", is_active);
2721
2722 qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) pme->m_inputSaveQ.dequeue();
2723 if (job_data == NULL) {
2724 LOGE("Invalid jpeg event data");
2725 continue;
2726 }
2727 //qcamera_jpeg_data_t *jpeg_job =
2728 // (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue(false);
2729 //uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;
2730 uint32_t frame_idx = 75;
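    // NOTE: the jpeg-job based frame index lookup above is commented out, so a
    // hard-coded placeholder index is used for the save-path notification.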
2731
2732 pme->m_ongoingJpegQ.flushNodes(matchJobId, (void*)&job_data->jobId);
2733
2734 LOGH("[KPI Perf] : jpeg job %d", job_data->jobId);
2735
2736 if (is_active == TRUE) {
2737 memset(saveName, '\0', sizeof(saveName));
2738 snprintf(saveName,
2739 sizeof(saveName),
2740 QCameraPostProcessor::STORE_LOCATION,
2741 pme->mSaveFrmCnt);
2742
2743 int file_fd = open(saveName, O_RDWR | O_CREAT, 0655);
2744 if (file_fd >= 0) {
2745 ssize_t written_len = write(file_fd, job_data->out_data.buf_vaddr,
2746 job_data->out_data.buf_filled_len);
2747 if ((ssize_t)job_data->out_data.buf_filled_len != written_len) {
2748 LOGE("Failed save complete data %d bytes "
2749 "written instead of %d bytes!",
2750 written_len,
2751 job_data->out_data.buf_filled_len);
2752 } else {
2753 LOGH("written number of bytes %d\n",
2754 written_len);
2755 }
2756
2757 close(file_fd);
2758 } else {
2759 LOGE("fail t open file for saving");
2760 }
2761 pme->mSaveFrmCnt++;
2762
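    // In the save path the callback payload is the stored file name rather
    // than the JPEG bitstream; releaseNotifyData() may unlink the file later
    // via release_data.unlinkFile if the callback fails.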
2763 camera_memory_t* jpeg_mem = pme->m_parent->mGetMemory(-1,
2764 strlen(saveName),
2765 1,
2766 pme->m_parent->mCallbackCookie);
2767 if (NULL == jpeg_mem) {
2768 ret = NO_MEMORY;
2769 LOGE("getMemory for jpeg, ret = NO_MEMORY");
2770 goto end;
2771 }
2772 memcpy(jpeg_mem->data, saveName, strlen(saveName));
2773
2774 LOGH("Calling upperlayer callback to store JPEG image");
2775 qcamera_release_data_t release_data;
2776 memset(&release_data, 0, sizeof(qcamera_release_data_t));
2777 release_data.data = jpeg_mem;
2778 release_data.unlinkFile = true;
2779 LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
2780 ret = pme->sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
2781 jpeg_mem,
2782 0,
2783 NULL,
2784 &release_data,
2785 frame_idx);
2786 }
2787
2788 end:
2789 free(job_data);
2790 }
2791 break;
2792 case CAMERA_CMD_TYPE_EXIT:
2793 LOGH("save thread exit");
2794 running = 0;
2795 break;
2796 default:
2797 break;
2798 }
2799 } while (running);
2800 LOGH("X");
2801 return NULL;
2802 }
2803
2804 /*===========================================================================
2805 * FUNCTION : dataProcessRoutine
2806 *
2807 * DESCRIPTION: data process routine that handles input data either from input
2808 * Jpeg Queue to do jpeg encoding, or from input PP Queue to do
2809 * reprocess.
2810 *
2811 * PARAMETERS :
2812 * @data : user data ptr (QCameraPostProcessor)
2813 *
2814 * RETURN : None
2815 *==========================================================================*/
2816 void *QCameraPostProcessor::dataProcessRoutine(void *data)
2817 {
2818 int running = 1;
2819 int ret;
2820 uint8_t is_active = FALSE;
2821 QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
2822 QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
2823 cmdThread->setName("CAM_DataProc");
2824
2825 LOGH("E");
2826 do {
2827 do {
2828 ret = cam_sem_wait(&cmdThread->cmd_sem);
2829 if (ret != 0 && errno != EINVAL) {
2830 LOGE("cam_sem_wait error (%s)",
2831 strerror(errno));
2832 return NULL;
2833 }
2834 } while (ret != 0);
2835
2836 // we got notified about new cmd avail in cmd queue
2837 camera_cmd_type_t cmd = cmdThread->getCmd();
2838 switch (cmd) {
2839 case CAMERA_CMD_TYPE_START_DATA_PROC:
2840 LOGH("start data proc");
2841 is_active = TRUE;
2842
2843 pme->m_ongoingPPQ.init();
2844 pme->m_inputJpegQ.init();
2845 pme->m_inputPPQ.init();
2846 pme->m_inputRawQ.init();
2847
2848 pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,
2849 FALSE,
2850 FALSE);
2851
2852 // signal cmd is completed
2853 cam_sem_post(&cmdThread->sync_sem);
2854
2855 break;
2856 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
2857 {
2858 LOGH("stop data proc");
2859 is_active = FALSE;
2860
2861 pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
2862 TRUE,
2863 TRUE);
2864 // cancel all ongoing jpeg jobs
2865 qcamera_jpeg_data_t *jpeg_job =
2866 (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
2867 while (jpeg_job != NULL) {
2868 pme->mJpegHandle.abort_job(jpeg_job->jobId);
2869
2870 pme->releaseJpegJobData(jpeg_job);
2871 free(jpeg_job);
2872
2873 jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
2874 }
2875
2876 // destroy jpeg encoding session
2877 if ( 0 < pme->mJpegSessionId ) {
2878 pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
2879 pme->mJpegSessionId = 0;
2880 }
2881
2882 // free jpeg out buf and exif obj
2883 FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMem,
2884 pme->m_JpegOutputMemCount);
2885
2886 if (pme->m_pJpegExifObj != NULL) {
2887 delete pme->m_pJpegExifObj;
2888 pme->m_pJpegExifObj = NULL;
2889 }
2890
2891 // flush ongoing postproc Queue
2892 pme->m_ongoingPPQ.flush();
2893
2894 // flush input jpeg Queue
2895 pme->m_inputJpegQ.flush();
2896
2897 // flush input Postproc Queue
2898 pme->m_inputPPQ.flush();
2899
2900 // flush input raw Queue
2901 pme->m_inputRawQ.flush();
2902
2903 // signal cmd is completed
2904 cam_sem_post(&cmdThread->sync_sem);
2905
2906 pme->mNewJpegSessionNeeded = true;
2907 }
2908 break;
2909 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
2910 {
2911 LOGH("Do next job, active is %d", is_active);
2912 if (is_active == TRUE) {
2913 qcamera_jpeg_data_t *jpeg_job =
2914 (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
2915
2916 if (NULL != jpeg_job) {
2917 // To avoid any race conditions,
2918 // sync any stream specific parameters here.
2919 if (pme->m_parent->mParameters.isAdvCamFeaturesEnabled()) {
2920 // Sync stream params, only if advanced features configured
2921 // Reduces the latency for normal snapshot.
2922 pme->syncStreamParams(jpeg_job->src_frame, NULL);
2923 }
2924
2925 // add into ongoing jpeg job Q
2926 if (pme->m_ongoingJpegQ.enqueue((void *)jpeg_job)) {
2927 ret = pme->encodeData(jpeg_job,
2928 pme->mNewJpegSessionNeeded);
2929 if (NO_ERROR != ret) {
2930 // dequeue the last one
2931 pme->m_ongoingJpegQ.dequeue(false);
2932 pme->releaseJpegJobData(jpeg_job);
2933 free(jpeg_job);
2934 jpeg_job = NULL;
2935 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
2936 }
2937 } else {
2938 LOGW("m_ongoingJpegQ is not active!!!");
2939 pme->releaseJpegJobData(jpeg_job);
2940 free(jpeg_job);
2941 jpeg_job = NULL;
2942 }
2943 }
2944
2945
2946 // process raw data if any
2947 mm_camera_super_buf_t *super_buf =
2948 (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
2949
2950 if (NULL != super_buf) {
2951 //play shutter sound
2952 pme->m_parent->playShutter();
2953 ret = pme->processRawImageImpl(super_buf);
2954 if (NO_ERROR != ret) {
2955 pme->releaseSuperBuf(super_buf);
2956 free(super_buf);
2957 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
2958 }
2959 }
2960
2961 ret = pme->doReprocess();
2962 if (NO_ERROR != ret) {
2963 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
2964 } else {
2965 ret = pme->stopCapture();
2966 }
2967
2968 } else {
2969 // not active, simply return buf and do no op
2970 qcamera_jpeg_data_t *jpeg_data =
2971 (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
2972 if (NULL != jpeg_data) {
2973 pme->releaseJpegJobData(jpeg_data);
2974 free(jpeg_data);
2975 }
2976 mm_camera_super_buf_t *super_buf =
2977 (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
2978 if (NULL != super_buf) {
2979 pme->releaseSuperBuf(super_buf);
2980 free(super_buf);
2981 }
2982
2983 // flush input Postproc Queue
2984 pme->m_inputPPQ.flush();
2985 }
2986 }
2987 break;
2988 case CAMERA_CMD_TYPE_EXIT:
2989 running = 0;
2990 break;
2991 default:
2992 break;
2993 }
2994 } while (running);
2995 LOGH("X");
2996 return NULL;
2997 }
2998
2999 /*===========================================================================
3000 * FUNCTION : doReprocess
3001 *
3002 * DESCRIPTION: Trigger channel reprocessing
3003 *
3004 * PARAMETERS :None
3005 *
3006 * RETURN : int32_t type of status
3007 * NO_ERROR -- success
3008 * non-zero failure code
3009 *==========================================================================*/
3010 int32_t QCameraPostProcessor::doReprocess()
3011 {
3012 int32_t ret = NO_ERROR;
3013 QCameraChannel *m_pSrcChannel = NULL;
3014 QCameraStream *pMetaStream = NULL;
3015 uint8_t meta_buf_index = 0;
3016 mm_camera_buf_def_t *meta_buf = NULL;
3017 mm_camera_super_buf_t *ppInputFrame = NULL;
3018
3019 qcamera_pp_data_t *ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.peek();
3020 if ((ppreq_job == NULL) || (ppreq_job->src_frame == NULL)) {
3021 return ret;
3022 }
3023
3024 if (!validatePostProcess(ppreq_job->src_frame)) {
3025 return ret;
3026 }
3027
3028 ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.dequeue();
3029 if (ppreq_job == NULL || ppreq_job->src_frame == NULL ||
3030 ppreq_job->src_reproc_frame == NULL) {
3031 return ret;
3032 }
3033
3034 mm_camera_super_buf_t *src_frame = ppreq_job->src_frame;
3035 mm_camera_super_buf_t *src_reproc_frame = ppreq_job->src_reproc_frame;
3036 int8_t mCurReprocCount = ppreq_job->reprocCount;
3037 int8_t mCurChannelIdx = ppreq_job->ppChannelIndex;
3038
3039 LOGD("frame = %p src_frame = %p mCurReprocCount = %d mCurChannelIdx = %d",
3040 src_frame,src_reproc_frame,mCurReprocCount, mCurChannelIdx);
3041
3042 if ((m_parent->mParameters.getManualCaptureMode() >=
3043 CAM_MANUAL_CAPTURE_TYPE_3) && (mCurChannelIdx == 0)) {
3044 ppInputFrame = src_reproc_frame;
3045 } else {
3046 ppInputFrame = src_frame;
3047 }
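    // In manual capture mode (type 3 or higher) the first pass (channel index 0)
    // reprocesses the original source frame; subsequent passes operate on the
    // already reprocessed frame.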
3048
3049 if (mPPChannelCount >= CAM_PP_CHANNEL_MAX) {
3050 LOGE("invalid channel count");
3051 return UNKNOWN_ERROR;
3052 }
3053
3054 // find meta data stream and index of meta data frame in the superbuf
3055 for (int8_t j = 0; j < mPPChannelCount; j++) {
3056 /*First search in src buffer for any offline metadata */
3057 for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
3058 QCameraStream *pStream = mPPChannels[j]->getStreamByHandle(
3059 src_frame->bufs[i]->stream_id);
3060 if (pStream != NULL && pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
3061 meta_buf_index = (uint8_t) src_frame->bufs[i]->buf_idx;
3062 pMetaStream = pStream;
3063 meta_buf = src_frame->bufs[i];
3064 break;
3065 }
3066 }
3067
3068 if ((pMetaStream != NULL) && (meta_buf != NULL)) {
3069 LOGD("Found Offline stream metadata = %d",
3070 (int)meta_buf_index);
3071 break;
3072 }
3073 }
3074
3075 if ((pMetaStream == NULL) && (meta_buf == NULL)) {
3076 for (int8_t j = 0; j < mPPChannelCount; j++) {
3077 m_pSrcChannel = mPPChannels[j]->getSrcChannel();
3078 if (m_pSrcChannel == NULL)
3079 continue;
3080 for (uint32_t i = 0; i < src_reproc_frame->num_bufs; i++) {
3081 QCameraStream *pStream =
3082 m_pSrcChannel->getStreamByHandle(
3083 src_reproc_frame->bufs[i]->stream_id);
3084 if (pStream != NULL && pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
3085 meta_buf_index = (uint8_t) src_reproc_frame->bufs[i]->buf_idx;
3086 pMetaStream = pStream;
3087 meta_buf = src_reproc_frame->bufs[i];
3088 break;
3089 }
3090 }
3091 if ((pMetaStream != NULL) && (meta_buf != NULL)) {
3092 LOGD("Found Meta data info for reprocessing index = %d",
3093 (int)meta_buf_index);
3094 break;
3095 }
3096 }
3097 }
3098
3099 if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
3100 // Sync stream params only when advanced features are configured;
3101 // skipping this for normal snapshots reduces latency.
3102 syncStreamParams(src_frame, src_reproc_frame);
3103 }
3104 if (mPPChannels[mCurChannelIdx] != NULL) {
3105 // add into ongoing PP job Q
3106 ppreq_job->reprocCount = (int8_t) (mCurReprocCount + 1);
3107
3108 if ((m_parent->isRegularCapture()) || (ppreq_job->offline_buffer)) {
3109 m_bufCountPPQ++;
3110 if (m_ongoingPPQ.enqueue((void *)ppreq_job)) {
3111 ret = mPPChannels[mCurChannelIdx]->doReprocessOffline(ppInputFrame,
3112 meta_buf, m_parent->mParameters);
3113 if (ret != NO_ERROR) {
3114 goto end;
3115 }
3116
3117 if ((ppreq_job->offline_buffer) &&
3118 (ppreq_job->offline_reproc_buf)) {
3119 mPPChannels[mCurChannelIdx]->doReprocessOffline(
3120 ppreq_job->offline_reproc_buf, meta_buf);
3121 }
3122 } else {
3123 LOGW("m_ongoingPPQ is not active!!!");
3124 ret = UNKNOWN_ERROR;
3125 goto end;
3126 }
3127 } else {
3128 m_bufCountPPQ++;
3129 if (!m_ongoingPPQ.enqueue((void *)ppreq_job)) {
3130 LOGW("m_ongoingJpegQ is not active!!!");
3131 ret = UNKNOWN_ERROR;
3132 goto end;
3133 }
3134
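    // If a single shot is expected to produce more reprocess outputs than it
    // has input buffers, extra placeholder PP jobs are enqueued below so the
    // ongoing PP queue accounts for every expected output.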
3135 int32_t numRequiredPPQBufsForSingleOutput = (int32_t)
3136 m_parent->mParameters.getNumberInBufsForSingleShot();
3137
3138 if (m_bufCountPPQ % numRequiredPPQBufsForSingleOutput == 0) {
3139 int32_t extra_pp_job_count =
3140 m_parent->mParameters.getNumberOutBufsForSingleShot() -
3141 m_parent->mParameters.getNumberInBufsForSingleShot();
3142
3143 for (int32_t i = 0; i < extra_pp_job_count; i++) {
3144 qcamera_pp_data_t *extra_pp_job =
3145 (qcamera_pp_data_t *)calloc(1, sizeof(qcamera_pp_data_t));
3146 if (!extra_pp_job) {
3147 LOGE("no mem for qcamera_pp_data_t");
3148 ret = NO_MEMORY;
3149 break;
3150 }
3151 extra_pp_job->reprocCount = ppreq_job->reprocCount;
3152 if (!m_ongoingPPQ.enqueue((void *)extra_pp_job)) {
3153 LOGW("m_ongoingJpegQ is not active!!!");
3154 releaseOngoingPPData(extra_pp_job, this);
3155 free(extra_pp_job);
3156 extra_pp_job = NULL;
3157 goto end;
3158 }
3159 }
3160 }
3161
3162 ret = mPPChannels[mCurChannelIdx]->doReprocess(ppInputFrame,
3163 m_parent->mParameters, pMetaStream, meta_buf_index);
3164 }
3165 } else {
3166 LOGE("Reprocess channel is NULL");
3167 ret = UNKNOWN_ERROR;
3168 }
3169
3170 end:
3171 if (ret != NO_ERROR) {
3172 releaseOngoingPPData(ppreq_job, this);
3173 if (ppreq_job != NULL) {
3174 free(ppreq_job);
3175 ppreq_job = NULL;
3176 }
3177 }
3178 return ret;
3179 }
3180
3181 /*===========================================================================
3182 * FUNCTION : getReprocChannel
3183 *
3184 * DESCRIPTION: Returns reprocessing channel handle
3185 *
3186 * PARAMETERS : index for reprocessing array
3187 *
3188 * RETURN : QCameraReprocessChannel * type of pointer
3189 NULL if no reprocessing channel
3190 *==========================================================================*/
3191 QCameraReprocessChannel * QCameraPostProcessor::getReprocChannel(uint8_t index)
3192 {
3193 if (index >= mPPChannelCount) {
3194 LOGE("Invalid index value");
3195 return NULL;
3196 }
3197 return mPPChannels[index];
3198 }
3199
3200 /*===========================================================================
3201 * FUNCTION : stopCapture
3202 *
3203 * DESCRIPTION: Trigger image capture stop
3204 *
3205 * PARAMETERS :
3206 * None
3207 *
3208 * RETURN : int32_t type of status
3209 * NO_ERROR -- success
3210 * non-zero failure code
3211 *==========================================================================*/
3212 int32_t QCameraPostProcessor::stopCapture()
3213 {
3214 int rc = NO_ERROR;
3215
3216 if (m_parent->isRegularCapture()) {
3217 rc = m_parent->processAPI(
3218 QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,
3219 NULL);
3220 }
3221
3222 return rc;
3223 }
3224
3225 /*===========================================================================
3226 * FUNCTION : getJpegPaddingReq
3227 *
3228 * DESCRIPTION: function to get the jpeg specific padding requirements
3229 *
3230 * PARAMETERS :
3231 * @padding_info : jpeg specific padding requirement
3232 *
3233 * RETURN : int32_t type of status
3234 * NO_ERROR -- success
3235 * non-zero failure code
3236 *==========================================================================*/
3237 int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
3238 {
3239 // TODO: hardcode for now, needs to query from mm-jpeg-interface
3240 padding_info.width_padding = CAM_PAD_NONE;
3241 padding_info.height_padding = CAM_PAD_TO_16;
3242 padding_info.plane_padding = CAM_PAD_TO_WORD;
3243 padding_info.offset_info.offset_x = 0;
3244 padding_info.offset_info.offset_y = 0;
3245 return NO_ERROR;
3246 }
3247
3248 /*===========================================================================
3249 * FUNCTION : setYUVFrameInfo
3250 *
3251 * DESCRIPTION: set Raw YUV frame data info for up-layer
3252 *
3253 * PARAMETERS :
3254 * @frame : process frame received from mm-camera-interface
3255 *
3256 * RETURN : int32_t type of status
3257 * NO_ERROR -- success
3258 * non-zero failure code
3259 *
3260 * NOTE : currently we return frame len, y offset, cbcr offset and frame format
3261 *==========================================================================*/
3262 int32_t QCameraPostProcessor::setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame)
3263 {
3264 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
3265 // check reprocess channel if not found
3266 if (pChannel == NULL) {
3267 for (int8_t i = 0; i < mPPChannelCount; i++) {
3268 if ((mPPChannels[i] != NULL) &&
3269 (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
3270 pChannel = mPPChannels[i];
3271 break;
3272 }
3273 }
3274 }
3275
3276 if (pChannel == NULL) {
3277 LOGE("No corresponding channel (ch_id = %d) exist, return here",
3278 recvd_frame->ch_id);
3279 return BAD_VALUE;
3280 }
3281
3282 // find snapshot frame
3283 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
3284 QCameraStream *pStream =
3285 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
3286 if (pStream != NULL) {
3287 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
3288 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
3289 //get the main frame, use stream info
3290 cam_frame_len_offset_t frame_offset;
3291 cam_dimension_t frame_dim;
3292 cam_format_t frame_fmt;
3293 const char *fmt_string;
3294 pStream->getFrameDimension(frame_dim);
3295 pStream->getFrameOffset(frame_offset);
3296 pStream->getFormat(frame_fmt);
3297 fmt_string = m_parent->mParameters.getFrameFmtString(frame_fmt);
3298
3299 int cbcr_offset = (int32_t)frame_offset.mp[0].len -
3300 frame_dim.width * frame_dim.height;
3301
3302 LOGH("frame width=%d, height=%d, yoff=%d, cbcroff=%d, fmt_string=%s",
3303 frame_dim.width, frame_dim.height, frame_offset.mp[0].offset, cbcr_offset, fmt_string);
3304 return NO_ERROR;
3305 }
3306 }
3307 }
3308
3309 return BAD_VALUE;
3310 }
3311
3312 bool QCameraPostProcessor::matchJobId(void *data, void *, void *match_data)
3313 {
3314 qcamera_jpeg_data_t * job = (qcamera_jpeg_data_t *) data;
3315 uint32_t job_id = *((uint32_t *) match_data);
3316 return job->jobId == job_id;
3317 }
3318
3319 /*===========================================================================
3320 * FUNCTION : getJpegMemory
3321 *
3322 * DESCRIPTION: buffer allocation function
3323 * to pass to jpeg interface
3324 *
3325 * PARAMETERS :
3326 * @out_buf : buffer descriptor struct
3327 *
3328 * RETURN : int32_t type of status
3329 * NO_ERROR -- success
3330 * non-zero failure code
3331 *==========================================================================*/
3332 int QCameraPostProcessor::getJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
3333 {
3334 LOGH("Allocating jpeg out buffer of size: %d", out_buf->size);
3335 QCameraPostProcessor *procInst = (QCameraPostProcessor *) out_buf->handle;
3336 camera_memory_t *cam_mem = procInst->m_parent->mGetMemory(out_buf->fd, out_buf->size, 1U,
3337 procInst->m_parent->mCallbackCookie);
3338 out_buf->mem_hdl = cam_mem;
3339 out_buf->vaddr = cam_mem->data;
3340
3341 return 0;
3342 }
3343
3344 /*===========================================================================
3345 * FUNCTION : releaseJpegMemory
3346 *
3347 * DESCRIPTION: release jpeg memory function
3348 * to pass to jpeg interface, in case of abort
3349 *
3350 * PARAMETERS :
3351 * @out_buf : buffer descriptor struct
3352 *
3353 * RETURN : int32_t type of status
3354 * NO_ERROR -- success
3355 * non-zero failure code
3356 *==========================================================================*/
int QCameraPostProcessor::releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
{
    if (out_buf && out_buf->mem_hdl) {
        LOGD("releasing jpeg out buffer of size: %d", out_buf->size);
        camera_memory_t *cam_mem = (camera_memory_t *)out_buf->mem_hdl;
        cam_mem->release(cam_mem);
        out_buf->mem_hdl = NULL;
        out_buf->vaddr = NULL;
        return NO_ERROR;
    }
    return -1;
}

/*===========================================================================
 * FUNCTION   : QCameraExif
 *
 * DESCRIPTION: constructor of QCameraExif
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
QCameraExif::QCameraExif()
    : m_nNumEntries(0)
{
    memset(m_Entries, 0, sizeof(m_Entries));
}

/*===========================================================================
 * FUNCTION   : ~QCameraExif
 *
 * DESCRIPTION: destructor of QCameraExif. Will release internal memory ptrs.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
QCameraExif::~QCameraExif()
{
    for (uint32_t i = 0; i < m_nNumEntries; i++) {
        switch (m_Entries[i].tag_entry.type) {
        case EXIF_BYTE:
            {
                if (m_Entries[i].tag_entry.count > 1 &&
                        m_Entries[i].tag_entry.data._bytes != NULL) {
                    free(m_Entries[i].tag_entry.data._bytes);
                    m_Entries[i].tag_entry.data._bytes = NULL;
                }
            }
            break;
        case EXIF_ASCII:
            {
                if (m_Entries[i].tag_entry.data._ascii != NULL) {
                    free(m_Entries[i].tag_entry.data._ascii);
                    m_Entries[i].tag_entry.data._ascii = NULL;
                }
            }
            break;
        case EXIF_SHORT:
            {
                if (m_Entries[i].tag_entry.count > 1 &&
                        m_Entries[i].tag_entry.data._shorts != NULL) {
                    free(m_Entries[i].tag_entry.data._shorts);
                    m_Entries[i].tag_entry.data._shorts = NULL;
                }
            }
            break;
        case EXIF_LONG:
            {
                if (m_Entries[i].tag_entry.count > 1 &&
                        m_Entries[i].tag_entry.data._longs != NULL) {
                    free(m_Entries[i].tag_entry.data._longs);
                    m_Entries[i].tag_entry.data._longs = NULL;
                }
            }
            break;
        case EXIF_RATIONAL:
            {
                if (m_Entries[i].tag_entry.count > 1 &&
                        m_Entries[i].tag_entry.data._rats != NULL) {
                    free(m_Entries[i].tag_entry.data._rats);
                    m_Entries[i].tag_entry.data._rats = NULL;
                }
            }
            break;
        case EXIF_UNDEFINED:
            {
                if (m_Entries[i].tag_entry.data._undefined != NULL) {
                    free(m_Entries[i].tag_entry.data._undefined);
                    m_Entries[i].tag_entry.data._undefined = NULL;
                }
            }
            break;
        case EXIF_SLONG:
            {
                if (m_Entries[i].tag_entry.count > 1 &&
                        m_Entries[i].tag_entry.data._slongs != NULL) {
                    free(m_Entries[i].tag_entry.data._slongs);
                    m_Entries[i].tag_entry.data._slongs = NULL;
                }
            }
            break;
        case EXIF_SRATIONAL:
            {
                if (m_Entries[i].tag_entry.count > 1 &&
                        m_Entries[i].tag_entry.data._srats != NULL) {
                    free(m_Entries[i].tag_entry.data._srats);
                    m_Entries[i].tag_entry.data._srats = NULL;
                }
            }
            break;
        }
    }
}

/*===========================================================================
 * FUNCTION   : addEntry
 *
 * DESCRIPTION: function to add an entry to exif data
 *
 * PARAMETERS :
 *   @tagid : exif tag ID
 *   @type  : data type
 *   @count : number of data items, in units of the given type
 *   @data  : input data ptr
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
                              exif_tag_type_t type,
                              uint32_t count,
                              void *data)
{
    int32_t rc = NO_ERROR;
    if (m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
        LOGE("Number of entries exceeded limit");
        return NO_MEMORY;
    }

    m_Entries[m_nNumEntries].tag_id = tagid;
    m_Entries[m_nNumEntries].tag_entry.type = type;
    m_Entries[m_nNumEntries].tag_entry.count = count;
    m_Entries[m_nNumEntries].tag_entry.copy = 1;
    switch (type) {
    case EXIF_BYTE:
        {
            if (count > 1) {
                uint8_t *values = (uint8_t *)malloc(count);
                if (values == NULL) {
                    LOGE("No memory for byte array");
                    rc = NO_MEMORY;
                } else {
                    memcpy(values, data, count);
                    m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
                }
            } else {
                m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
            }
        }
        break;
    case EXIF_ASCII:
        {
            char *str = NULL;
            str = (char *)malloc(count + 1);
            if (str == NULL) {
                LOGE("No memory for ascii string");
                rc = NO_MEMORY;
            } else {
                memset(str, 0, count + 1);
                memcpy(str, data, count);
                m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
            }
        }
        break;
    case EXIF_SHORT:
        {
            uint16_t *exif_data = (uint16_t *)data;
            if (count > 1) {
                uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
                if (values == NULL) {
                    LOGE("No memory for short array");
                    rc = NO_MEMORY;
                } else {
                    memcpy(values, exif_data, count * sizeof(uint16_t));
                    m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
                }
            } else {
                m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
            }
        }
        break;
    case EXIF_LONG:
        {
            uint32_t *exif_data = (uint32_t *)data;
            if (count > 1) {
                uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
                if (values == NULL) {
                    LOGE("No memory for long array");
                    rc = NO_MEMORY;
                } else {
                    memcpy(values, exif_data, count * sizeof(uint32_t));
                    m_Entries[m_nNumEntries].tag_entry.data._longs = values;
                }
            } else {
                m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
            }
        }
        break;
    case EXIF_RATIONAL:
        {
            rat_t *exif_data = (rat_t *)data;
            if (count > 1) {
                rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
                if (values == NULL) {
                    LOGE("No memory for rational array");
                    rc = NO_MEMORY;
                } else {
                    memcpy(values, exif_data, count * sizeof(rat_t));
                    m_Entries[m_nNumEntries].tag_entry.data._rats = values;
                }
            } else {
                m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
            }
        }
        break;
    case EXIF_UNDEFINED:
        {
            uint8_t *values = (uint8_t *)malloc(count);
            if (values == NULL) {
                LOGE("No memory for undefined array");
                rc = NO_MEMORY;
            } else {
                memcpy(values, data, count);
                m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
            }
        }
        break;
    case EXIF_SLONG:
        {
            int32_t *exif_data = (int32_t *)data;
            if (count > 1) {
                int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
                if (values == NULL) {
                    LOGE("No memory for signed long array");
                    rc = NO_MEMORY;
                } else {
                    memcpy(values, exif_data, count * sizeof(int32_t));
                    m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
                }
            } else {
                m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
            }
        }
        break;
    case EXIF_SRATIONAL:
        {
            srat_t *exif_data = (srat_t *)data;
            if (count > 1) {
                srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
                if (values == NULL) {
                    LOGE("No memory for signed rational array");
                    rc = NO_MEMORY;
                } else {
                    memcpy(values, exif_data, count * sizeof(srat_t));
                    m_Entries[m_nNumEntries].tag_entry.data._srats = values;
                }
            } else {
                m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
            }
        }
        break;
    }

    // Increase number of entries
    m_nNumEntries++;
    return rc;
}
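
/*===========================================================================
 * Usage sketch for addEntry() (illustrative only, not part of the HAL call
 * flow; the QCameraExif instance, the string value, and the use of the
 * EXIFTAGID_MAKE tag ID below are assumptions made for this example):
 *
 *   QCameraExif exif;
 *   const char make[] = "ExampleVendor";
 *   // addEntry() copies the bytes and NUL-terminates its own ASCII copy
 *   exif.addEntry(EXIFTAGID_MAKE, EXIF_ASCII,
 *           (uint32_t)(strlen(make) + 1), (void *)make);
 *==========================================================================*/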

}; // namespace qcamera