1 /* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3Channel"
32 //#define LOG_NDEBUG 0
33 #include <fcntl.h>
34 #include <stdlib.h>
35 #include <cstdlib>
36 #include <stdio.h>
37 #include <string.h>
38 #include <hardware/camera3.h>
39 #include <system/camera_metadata.h>
40 #include <gralloc_priv.h>
41 #include <utils/Log.h>
42 #include <utils/Errors.h>
43 #include <utils/Trace.h>
44 #include <cutils/properties.h>
45 #include "QCamera3Channel.h"
46 #include "QCamera3HWI.h"
47
48 using namespace android;
49
// Minimum number of streaming buffers: 7 for the pipeline + 11 extra.
// Fix: parenthesized so the macro expands correctly in arithmetic contexts
// (e.g. 2 * MIN_STREAMING_BUFFER_NUM previously expanded to 2*7+11).
#define MIN_STREAMING_BUFFER_NUM (7+11)
51
52 namespace qcamera {
// Prefix bytes prepended to EXIF ASCII-typed tag payloads
static const char ExifAsciiPrefix[] =
    { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
// Prefix bytes prepended to EXIF UNDEFINED-typed tag payloads
static const char ExifUndefinedPrefix[] =
    { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };       // "\0\0\0\0\0\0\0\0"

#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
#define FOCAL_LENGTH_DECIMAL_PRECISION   100

// Default backend pixel formats chosen per stream type
#define VIDEO_FORMAT    CAM_FORMAT_YUV_420_NV12
#define SNAPSHOT_FORMAT CAM_FORMAT_YUV_420_NV21
#define PREVIEW_FORMAT  CAM_FORMAT_YUV_420_NV21
#define DEFAULT_FORMAT  CAM_FORMAT_YUV_420_NV21
#define CALLBACK_FORMAT CAM_FORMAT_YUV_420_NV21
#define RAW_FORMAT      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
67
68 /*===========================================================================
69 * FUNCTION : QCamera3Channel
70 *
71 * DESCRIPTION: constrcutor of QCamera3Channel
72 *
73 * PARAMETERS :
74 * @cam_handle : camera handle
75 * @cam_ops : ptr to camera ops table
76 *
77 * RETURN : none
78 *==========================================================================*/
QCamera3Channel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,uint32_t postprocess_mask,void * userData)79 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
80 mm_camera_ops_t *cam_ops,
81 channel_cb_routine cb_routine,
82 cam_padding_info_t *paddingInfo,
83 uint32_t postprocess_mask,
84 void *userData)
85 {
86 m_camHandle = cam_handle;
87 m_camOps = cam_ops;
88 m_bIsActive = false;
89
90 m_handle = 0;
91 m_numStreams = 0;
92 memset(mStreams, 0, sizeof(mStreams));
93 mUserData = userData;
94
95 mStreamInfoBuf = NULL;
96 mChannelCB = cb_routine;
97 mPaddingInfo = paddingInfo;
98
99 mPostProcMask = postprocess_mask;
100
101 char prop[PROPERTY_VALUE_MAX];
102 property_get("persist.camera.yuv.dump", prop, "0");
103 mYUVDump = atoi(prop);
104 mIsType = IS_TYPE_NONE;
105 }
106
107 /*===========================================================================
108 * FUNCTION : QCamera3Channel
109 *
110 * DESCRIPTION: default constrcutor of QCamera3Channel
111 *
112 * PARAMETERS : none
113 *
114 * RETURN : none
115 *==========================================================================*/
QCamera3Channel()116 QCamera3Channel::QCamera3Channel()
117 {
118 m_camHandle = 0;
119 m_camOps = NULL;
120 m_bIsActive = false;
121
122 m_handle = 0;
123 m_numStreams = 0;
124 memset(mStreams, 0, sizeof(mStreams));
125 mUserData = NULL;
126
127 mStreamInfoBuf = NULL;
128 mChannelCB = NULL;
129 mPaddingInfo = NULL;
130
131 mPostProcMask = 0;
132 }
133
134 /*===========================================================================
135 * FUNCTION : ~QCamera3Channel
136 *
137 * DESCRIPTION: destructor of QCamera3Channel
138 *
139 * PARAMETERS : none
140 *
141 * RETURN : none
142 *==========================================================================*/
~QCamera3Channel()143 QCamera3Channel::~QCamera3Channel()
144 {
145 if (m_bIsActive)
146 stop();
147
148 for (int i = 0; i < m_numStreams; i++) {
149 if (mStreams[i] != NULL) {
150 delete mStreams[i];
151 mStreams[i] = 0;
152 }
153 }
154 if (m_handle) {
155 m_camOps->delete_channel(m_camHandle, m_handle);
156 ALOGE("%s: deleting channel %d", __func__, m_handle);
157 m_handle = 0;
158 }
159 m_numStreams = 0;
160 }
161
162 /*===========================================================================
163 * FUNCTION : init
164 *
165 * DESCRIPTION: initialization of channel
166 *
167 * PARAMETERS :
168 * @attr : channel bundle attribute setting
169 * @dataCB : data notify callback
170 * @userData: user data ptr
171 *
172 * RETURN : int32_t type of status
173 * NO_ERROR -- success
174 * none-zero failure code
175 *==========================================================================*/
init(mm_camera_channel_attr_t * attr,mm_camera_buf_notify_t dataCB)176 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
177 mm_camera_buf_notify_t dataCB)
178 {
179 m_handle = m_camOps->add_channel(m_camHandle,
180 attr,
181 dataCB,
182 this);
183 if (m_handle == 0) {
184 ALOGE("%s: Add channel failed", __func__);
185 return UNKNOWN_ERROR;
186 }
187 return NO_ERROR;
188 }
189
190 /*===========================================================================
191 * FUNCTION : addStream
192 *
193 * DESCRIPTION: add a stream into channel
194 *
195 * PARAMETERS :
196 * @allocator : stream related buffer allocator
197 * @streamInfoBuf : ptr to buf that constains stream info
198 * @minStreamBufNum: number of stream buffers needed
199 * @paddingInfo : padding information
200 * @stream_cb : stream data notify callback
201 * @userdata : user data ptr
202 *
203 * RETURN : int32_t type of status
204 * NO_ERROR -- success
205 * none-zero failure code
206 *==========================================================================*/
addStream(cam_stream_type_t streamType,cam_format_t streamFormat,cam_dimension_t streamDim,uint8_t minStreamBufNum,uint32_t postprocessMask,cam_is_type_t isType)207 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
208 cam_format_t streamFormat,
209 cam_dimension_t streamDim,
210 uint8_t minStreamBufNum,
211 uint32_t postprocessMask,
212 cam_is_type_t isType)
213 {
214 int32_t rc = NO_ERROR;
215
216 if (m_numStreams >= 1) {
217 ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
218 return BAD_VALUE;
219 }
220
221 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
222 ALOGE("%s: stream number (%d) exceeds max limit (%d)",
223 __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
224 return BAD_VALUE;
225 }
226 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
227 m_handle,
228 m_camOps,
229 mPaddingInfo,
230 this);
231 if (pStream == NULL) {
232 ALOGE("%s: No mem for Stream", __func__);
233 return NO_MEMORY;
234 }
235
236 rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
237 postprocessMask, isType, streamCbRoutine, this);
238 if (rc == 0) {
239 mStreams[m_numStreams] = pStream;
240 m_numStreams++;
241 } else {
242 delete pStream;
243 }
244 return rc;
245 }
246
247 /*===========================================================================
248 * FUNCTION : start
249 *
250 * DESCRIPTION: start channel, which will start all streams belong to this channel
251 *
252 * PARAMETERS :
253 *
254 * RETURN : int32_t type of status
255 * NO_ERROR -- success
256 * none-zero failure code
257 *==========================================================================*/
start()258 int32_t QCamera3Channel::start()
259 {
260 ATRACE_CALL();
261 int32_t rc = NO_ERROR;
262
263 if (m_numStreams > 1) {
264 ALOGE("%s: bundle not supported", __func__);
265 } else if (m_numStreams == 0) {
266 return NO_INIT;
267 }
268
269 if(m_bIsActive) {
270 ALOGD("%s: Attempt to start active channel", __func__);
271 return rc;
272 }
273
274 for (int i = 0; i < m_numStreams; i++) {
275 if (mStreams[i] != NULL) {
276 mStreams[i]->start();
277 }
278 }
279 rc = m_camOps->start_channel(m_camHandle, m_handle);
280
281 if (rc != NO_ERROR) {
282 for (int i = 0; i < m_numStreams; i++) {
283 if (mStreams[i] != NULL) {
284 mStreams[i]->stop();
285 }
286 }
287 } else {
288 m_bIsActive = true;
289 }
290
291 return rc;
292 }
293
294 /*===========================================================================
295 * FUNCTION : stop
296 *
297 * DESCRIPTION: stop a channel, which will stop all streams belong to this channel
298 *
299 * PARAMETERS : none
300 *
301 * RETURN : int32_t type of status
302 * NO_ERROR -- success
303 * none-zero failure code
304 *==========================================================================*/
stop()305 int32_t QCamera3Channel::stop()
306 {
307 ATRACE_CALL();
308 int32_t rc = NO_ERROR;
309 if(!m_bIsActive) {
310 ALOGE("%s: Attempt to stop inactive channel",__func__);
311 return rc;
312 }
313
314 for (int i = 0; i < m_numStreams; i++) {
315 if (mStreams[i] != NULL) {
316 mStreams[i]->stop();
317 }
318 }
319
320 rc = m_camOps->stop_channel(m_camHandle, m_handle);
321
322 m_bIsActive = false;
323 return rc;
324 }
325
326 /*===========================================================================
327 * FUNCTION : bufDone
328 *
329 * DESCRIPTION: return a stream buf back to kernel
330 *
331 * PARAMETERS :
332 * @recvd_frame : stream buf frame to be returned
333 *
334 * RETURN : int32_t type of status
335 * NO_ERROR -- success
336 * none-zero failure code
337 *==========================================================================*/
bufDone(mm_camera_super_buf_t * recvd_frame)338 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
339 {
340 int32_t rc = NO_ERROR;
341 for (int i = 0; i < recvd_frame->num_bufs; i++) {
342 if (recvd_frame->bufs[i] != NULL) {
343 for (int j = 0; j < m_numStreams; j++) {
344 if (mStreams[j] != NULL &&
345 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
346 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
347 break; // break loop j
348 }
349 }
350 }
351 }
352
353 return rc;
354 }
355
356 /*===========================================================================
357 * FUNCTION : getStreamTypeMask
358 *
359 * DESCRIPTION: Get bit mask of all stream types in this channel
360 *
361 * PARAMETERS : None
362 *
363 * RETURN : Bit mask of all stream types in this channel
364 *==========================================================================*/
getStreamTypeMask()365 uint32_t QCamera3Channel::getStreamTypeMask()
366 {
367 uint32_t mask = 0;
368 for (int i = 0; i < m_numStreams; i++) {
369 mask |= (0x1 << mStreams[i]->getMyType());
370 }
371 return mask;
372 }
373
374 /*===========================================================================
375 * FUNCTION : getStreamID
376 *
377 * DESCRIPTION: Get StreamID of requested stream type
378 *
379 * PARAMETERS : streamMask
380 *
381 * RETURN : Stream ID
382 *==========================================================================*/
getStreamID(uint32_t streamMask)383 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
384 {
385 uint32_t streamID = 0;
386 for (int i = 0; i < m_numStreams; i++) {
387 if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
388 streamID = mStreams[i]->getMyServerID();
389 break;
390 }
391 }
392 return streamID;
393 }
394
395 /*===========================================================================
396 * FUNCTION : getStreamByHandle
397 *
398 * DESCRIPTION: return stream object by stream handle
399 *
400 * PARAMETERS :
401 * @streamHandle : stream handle
402 *
403 * RETURN : stream object. NULL if not found
404 *==========================================================================*/
getStreamByHandle(uint32_t streamHandle)405 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
406 {
407 for (int i = 0; i < m_numStreams; i++) {
408 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
409 return mStreams[i];
410 }
411 }
412 return NULL;
413 }
414
415 /*===========================================================================
416 * FUNCTION : getStreamByIndex
417 *
418 * DESCRIPTION: return stream object by index
419 *
420 * PARAMETERS :
421 * @streamHandle : stream handle
422 *
423 * RETURN : stream object. NULL if not found
424 *==========================================================================*/
getStreamByIndex(uint8_t index)425 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
426 {
427 if (index < m_numStreams) {
428 return mStreams[index];
429 }
430 return NULL;
431 }
432
433 /*===========================================================================
434 * FUNCTION : streamCbRoutine
435 *
436 * DESCRIPTION: callback routine for stream
437 *
438 * PARAMETERS :
439 * @streamHandle : stream handle
440 *
441 * RETURN : stream object. NULL if not found
442 *==========================================================================*/
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream,void * userdata)443 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
444 QCamera3Stream *stream, void *userdata)
445 {
446 QCamera3Channel *channel = (QCamera3Channel *)userdata;
447 if (channel == NULL) {
448 ALOGE("%s: invalid channel pointer", __func__);
449 return;
450 }
451 channel->streamCbRoutine(super_frame, stream);
452 }
453
454 /*===========================================================================
455 * FUNCTION : dumpYUV
456 *
457 * DESCRIPTION: function to dump the YUV data from ISP/pproc
458 *
459 * PARAMETERS :
460 * @frame : frame to be dumped
461 * @dim : dimension of the stream
462 * @offset : offset of the data
463 * @name : 1 if it is ISP output/pproc input, 2 if it is pproc output
464 *
465 * RETURN :
466 *==========================================================================*/
dumpYUV(mm_camera_buf_def_t * frame,cam_dimension_t dim,cam_frame_len_offset_t offset,uint8_t name)467 void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
468 cam_frame_len_offset_t offset, uint8_t name)
469 {
470 char buf[64];
471 memset(buf, 0, sizeof(buf));
472 static int counter = 0;
473 /* Note that the image dimension will be the unrotated stream dimension.
474 * If you feel that the image would have been rotated during reprocess
475 * then swap the dimensions while opening the file
476 * */
477 snprintf(buf, sizeof(buf), "/data/local/tmp/%d_%d_%d_%dx%d.yuv",
478 name, counter, frame->frame_idx, dim.width, dim.height);
479 counter++;
480 int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
481 if (file_fd >= 0) {
482 int written_len = write(file_fd, frame->buffer, offset.frame_len);
483 ALOGE("%s: written number of bytes %d", __func__, written_len);
484 close(file_fd);
485 } else {
486 ALOGE("%s: failed to open file to dump image", __func__);
487 }
488
489 }
490
491 /*===========================================================================
492 * FUNCTION : QCamera3RegularChannel
493 *
494 * DESCRIPTION: constructor of QCamera3RegularChannel
495 *
496 * PARAMETERS :
497 * @cam_handle : camera handle
498 * @cam_ops : ptr to camera ops table
499 * @cb_routine : callback routine to frame aggregator
500 * @stream : camera3_stream_t structure
501 * @stream_type: Channel stream type
502 *
503 * RETURN : none
504 *==========================================================================*/
QCamera3RegularChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,cam_stream_type_t stream_type,uint32_t postprocess_mask)505 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
506 mm_camera_ops_t *cam_ops,
507 channel_cb_routine cb_routine,
508 cam_padding_info_t *paddingInfo,
509 void *userData,
510 camera3_stream_t *stream,
511 cam_stream_type_t stream_type,
512 uint32_t postprocess_mask) :
513 QCamera3Channel(cam_handle, cam_ops, cb_routine,
514 paddingInfo, postprocess_mask, userData),
515 mCamera3Stream(stream),
516 mNumBufs(0),
517 mStreamType(stream_type),
518 mWidth(stream->width),
519 mHeight(stream->height)
520 {
521 }
522
523 /*===========================================================================
524 * FUNCTION : QCamera3RegularChannel
525 *
526 * DESCRIPTION: constructor of QCamera3RegularChannel
527 *
528 * PARAMETERS :
529 * @cam_handle : camera handle
530 * @cam_ops : ptr to camera ops table
531 * @cb_routine : callback routine to frame aggregator
532 * @stream : camera3_stream_t structure
533 * @stream_type: Channel stream type
534 * @postprocess_mask: bit mask for postprocessing
535 * @width : width overriding camera3_stream_t::width
536 * @height : height overriding camera3_stream_t::height
537 *
538 * RETURN : none
539 *==========================================================================*/
QCamera3RegularChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,cam_stream_type_t stream_type,uint32_t postprocess_mask,uint32_t width,uint32_t height)540 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
541 mm_camera_ops_t *cam_ops,
542 channel_cb_routine cb_routine,
543 cam_padding_info_t *paddingInfo,
544 void *userData,
545 camera3_stream_t *stream,
546 cam_stream_type_t stream_type,
547 uint32_t postprocess_mask,
548 uint32_t width, uint32_t height) :
549 QCamera3Channel(cam_handle, cam_ops, cb_routine,
550 paddingInfo, postprocess_mask, userData),
551 mCamera3Stream(stream),
552 mNumBufs(0),
553 mStreamType(stream_type),
554 mWidth(width),
555 mHeight(height)
556 {
557 }
558
559 /*===========================================================================
560 * FUNCTION : ~QCamera3RegularChannel
561 *
562 * DESCRIPTION: destructor of QCamera3RegularChannel
563 *
564 * PARAMETERS : none
565 *
566 * RETURN : none
567 *==========================================================================*/
~QCamera3RegularChannel()568 QCamera3RegularChannel::~QCamera3RegularChannel()
569 {
570 }
571
572 /*===========================================================================
573 * FUNCTION : initialize
574 *
575 * DESCRIPTION: Initialize and add camera channel & stream
576 *
577 * PARAMETERS :
578 *
579 * RETURN : int32_t type of status
580 * NO_ERROR -- success
581 * none-zero failure code
582 *==========================================================================*/
583
initialize(cam_is_type_t isType,uint8_t intent)584 int32_t QCamera3RawChannel::initialize(cam_is_type_t isType,
585 uint8_t intent)
586 {
587 return QCamera3RegularChannel::initialize(isType, intent);
588 }
initialize(cam_is_type_t isType,uint8_t intent)589 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType,
590 uint8_t intent)
591 {
592 ATRACE_CALL();
593 int32_t rc = NO_ERROR;
594 cam_format_t streamFormat;
595 cam_dimension_t streamDim;
596
597 if (NULL == mCamera3Stream) {
598 ALOGE("%s: Camera stream uninitialized", __func__);
599 return NO_INIT;
600 }
601
602 if (1 <= m_numStreams) {
603 // Only one stream per channel supported in v3 Hal
604 return NO_ERROR;
605 }
606
607 rc = init(NULL, NULL);
608 if (rc < 0) {
609 ALOGE("%s: init failed", __func__);
610 return rc;
611 }
612
613 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
614 mIsType = isType;
615 mIntent = intent;
616 mMemory.setColorSpace(mIntent);
617
618 if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
619 if (mStreamType == CAM_STREAM_TYPE_VIDEO) {
620 streamFormat = VIDEO_FORMAT;
621 } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
622 streamFormat = PREVIEW_FORMAT;
623 } else {
624 //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
625 // to be properly aligned and padded.
626 streamFormat = DEFAULT_FORMAT;
627 }
628 } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
629 streamFormat = CALLBACK_FORMAT;
630 } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
631 mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW10 ||
632 mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
633 // Bayer pattern doesn't matter here.
634 // All CAMIF raw format uses 10bit.
635 streamFormat = RAW_FORMAT;
636 } else {
637 //TODO: Fail for other types of streams for now
638 ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
639 return -EINVAL;
640 }
641
642 streamDim.width = mWidth;
643 streamDim.height = mHeight;
644
645 rc = QCamera3Channel::addStream(mStreamType,
646 streamFormat,
647 streamDim,
648 mNumBufs,
649 mPostProcMask,
650 mIsType);
651
652 return rc;
653 }
654
655 /*===========================================================================
656 * FUNCTION : start
657 *
658 * DESCRIPTION: start a regular channel
659 *
660 * PARAMETERS :
661 *
662 * RETURN : int32_t type of status
663 * NO_ERROR -- success
664 * none-zero failure code
665 *==========================================================================*/
start()666 int32_t QCamera3RegularChannel::start()
667 {
668 ATRACE_CALL();
669 int32_t rc = NO_ERROR;
670
671 if (0 < mMemory.getCnt()) {
672 rc = QCamera3Channel::start();
673 }
674 return rc;
675 }
676
677 /*===========================================================================
678 * FUNCTION : request
679 *
680 * DESCRIPTION: process a request from camera service. Stream on if ncessary.
681 *
682 * PARAMETERS :
683 * @buffer : buffer to be filled for this request
684 *
685 * RETURN : 0 on a success start of capture
686 * -EINVAL on invalid input
687 * -ENODEV on serious error
688 *==========================================================================*/
request(buffer_handle_t * buffer,uint32_t frameNumber)689 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
690 {
691 ATRACE_CALL();
692 //FIX ME: Return buffer back in case of failures below.
693
694 int32_t rc = NO_ERROR;
695 int index;
696
697 if (NULL == buffer) {
698 ALOGE("%s: Invalid buffer in channel request", __func__);
699 return BAD_VALUE;
700 }
701
702 if(!m_bIsActive) {
703 rc = registerBuffer(buffer, mIsType, mIntent);
704 if (NO_ERROR != rc) {
705 ALOGE("%s: On-the-fly buffer registration failed %d",
706 __func__, rc);
707 return rc;
708 }
709
710 rc = start();
711 if (NO_ERROR != rc) {
712 return rc;
713 }
714 } else {
715 CDBG("%s: Request on an existing stream",__func__);
716 }
717
718 index = mMemory.getMatchBufIndex((void*)buffer);
719 if(index < 0) {
720 rc = registerBuffer(buffer, mIsType, mIntent);
721 if (NO_ERROR != rc) {
722 ALOGE("%s: On-the-fly buffer registration failed %d",
723 __func__, rc);
724 return rc;
725 }
726
727 index = mMemory.getMatchBufIndex((void*)buffer);
728 if (index < 0) {
729 ALOGE("%s: Could not find object among registered buffers",
730 __func__);
731 return DEAD_OBJECT;
732 }
733 }
734
735 rc = mStreams[0]->bufDone(index);
736 if(rc != NO_ERROR) {
737 ALOGE("%s: Failed to Q new buffer to stream",__func__);
738 return rc;
739 }
740
741 rc = mMemory.markFrameNumber(index, frameNumber);
742 return rc;
743 }
744
745 /*===========================================================================
746 * FUNCTION : registerBuffer
747 *
748 * DESCRIPTION: register streaming buffer to the channel object
749 *
750 * PARAMETERS :
751 * @buffer : buffer to be registered
752 *
753 * RETURN : int32_t type of status
754 * NO_ERROR -- success
755 * none-zero failure code
756 *==========================================================================*/
registerBuffer(buffer_handle_t * buffer,cam_is_type_t isType,uint8_t intent)757 int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer,
758 cam_is_type_t isType,
759 uint8_t intent)
760 {
761 ATRACE_CALL();
762 int rc = 0;
763 mIsType = isType;
764 cam_stream_type_t streamType;
765
766 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
767 ALOGE("%s: Trying to register more buffers than initially requested",
768 __func__);
769 return BAD_VALUE;
770 }
771
772 if (0 == m_numStreams) {
773 rc = initialize(mIsType, intent);
774 if (rc != NO_ERROR) {
775 ALOGE("%s: Couldn't initialize camera stream %d",
776 __func__, rc);
777 return rc;
778 }
779 }
780
781 streamType = mStreams[0]->getMyType();
782 rc = mMemory.registerBuffer(buffer);
783 if (ALREADY_EXISTS == rc) {
784 return NO_ERROR;
785 } else if (NO_ERROR != rc) {
786 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
787 return rc;
788 }
789
790 return rc;
791 }
792
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)793 void QCamera3RegularChannel::streamCbRoutine(
794 mm_camera_super_buf_t *super_frame,
795 QCamera3Stream *stream)
796 {
797 ATRACE_CALL();
798 //FIXME Q Buf back in case of error?
799 uint8_t frameIndex;
800 buffer_handle_t *resultBuffer;
801 int32_t resultFrameNumber;
802 camera3_stream_buffer_t result;
803
804 if(!super_frame) {
805 ALOGE("%s: Invalid Super buffer",__func__);
806 return;
807 }
808
809 if(super_frame->num_bufs != 1) {
810 ALOGE("%s: Multiple streams are not supported",__func__);
811 return;
812 }
813 if(super_frame->bufs[0] == NULL ) {
814 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
815 __func__);
816 return;
817 }
818
819 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
820 if(frameIndex >= mNumBufs) {
821 ALOGE("%s: Error, Invalid index for buffer",__func__);
822 if(stream) {
823 stream->bufDone(frameIndex);
824 }
825 return;
826 }
827
828 ////Use below data to issue framework callback
829 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
830 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
831
832 result.stream = mCamera3Stream;
833 result.buffer = resultBuffer;
834 result.status = CAMERA3_BUFFER_STATUS_OK;
835 result.acquire_fence = -1;
836 result.release_fence = -1;
837
838 mChannelCB(NULL, &result, resultFrameNumber, mUserData);
839 free(super_frame);
840 return;
841 }
842
getStreamBufs(uint32_t)843 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
844 {
845 return &mMemory;
846 }
847
putStreamBufs()848 void QCamera3RegularChannel::putStreamBufs()
849 {
850 mMemory.unregisterBuffers();
851 }
852
// Maximum number of in-flight buffers for a regular channel.
int QCamera3RegularChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
854
QCamera3MetadataChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,uint32_t postprocess_mask,void * userData)855 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
856 mm_camera_ops_t *cam_ops,
857 channel_cb_routine cb_routine,
858 cam_padding_info_t *paddingInfo,
859 uint32_t postprocess_mask,
860 void *userData) :
861 QCamera3Channel(cam_handle, cam_ops,
862 cb_routine, paddingInfo, postprocess_mask, userData),
863 mMemory(NULL)
864 {
865 }
866
~QCamera3MetadataChannel()867 QCamera3MetadataChannel::~QCamera3MetadataChannel()
868 {
869 if (m_bIsActive)
870 stop();
871
872 if (mMemory) {
873 mMemory->deallocate();
874 delete mMemory;
875 mMemory = NULL;
876 }
877 }
878
initialize(cam_is_type_t isType,uint8_t)879 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType,
880 uint8_t /*intent*/)
881 {
882 ATRACE_CALL();
883 int32_t rc;
884 cam_dimension_t streamDim;
885
886 if (mMemory || m_numStreams > 0) {
887 ALOGE("%s: metadata channel already initialized", __func__);
888 return -EINVAL;
889 }
890
891 rc = init(NULL, NULL);
892 if (rc < 0) {
893 ALOGE("%s: init failed", __func__);
894 return rc;
895 }
896 mIsType = isType;
897 streamDim.width = sizeof(metadata_buffer_t),
898 streamDim.height = 1;
899 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
900 streamDim, MIN_STREAMING_BUFFER_NUM, mPostProcMask, mIsType);
901 if (rc < 0) {
902 ALOGE("%s: addStream failed", __func__);
903 }
904 return rc;
905 }
906
request(buffer_handle_t *,uint32_t)907 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
908 uint32_t /*frameNumber*/)
909 {
910 if (!m_bIsActive) {
911 return start();
912 }
913 else
914 return 0;
915 }
916
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream *)917 void QCamera3MetadataChannel::streamCbRoutine(
918 mm_camera_super_buf_t *super_frame,
919 QCamera3Stream * /*stream*/)
920 {
921 ATRACE_CALL();
922 uint32_t requestNumber = 0;
923 if (super_frame == NULL || super_frame->num_bufs != 1) {
924 ALOGE("%s: super_frame is not valid", __func__);
925 return;
926 }
927 mChannelCB(super_frame, NULL, requestNumber, mUserData);
928 }
929
getStreamBufs(uint32_t len)930 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
931 {
932 int rc;
933 if (len < sizeof(metadata_buffer_t)) {
934 ALOGE("%s: Metadata buffer size less than structure %d vs %d",
935 __func__,
936 len,
937 sizeof(metadata_buffer_t));
938 return NULL;
939 }
940 mMemory = new QCamera3HeapMemory();
941 if (!mMemory) {
942 ALOGE("%s: unable to create metadata memory", __func__);
943 return NULL;
944 }
945 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
946 if (rc < 0) {
947 ALOGE("%s: unable to allocate metadata memory", __func__);
948 delete mMemory;
949 mMemory = NULL;
950 return NULL;
951 }
952 memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
953 return mMemory;
954 }
955
putStreamBufs()956 void QCamera3MetadataChannel::putStreamBufs()
957 {
958 mMemory->deallocate();
959 delete mMemory;
960 mMemory = NULL;
961 }
962 /*************************************************************************************/
963 // RAW Channel related functions
964 int QCamera3RawChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
965
QCamera3RawChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,uint32_t postprocess_mask,bool raw_16)966 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
967 mm_camera_ops_t *cam_ops,
968 channel_cb_routine cb_routine,
969 cam_padding_info_t *paddingInfo,
970 void *userData,
971 camera3_stream_t *stream,
972 uint32_t postprocess_mask,
973 bool raw_16) :
974 QCamera3RegularChannel(cam_handle, cam_ops,
975 cb_routine, paddingInfo, userData, stream,
976 CAM_STREAM_TYPE_RAW, postprocess_mask),
977 mIsRaw16(raw_16)
978 {
979 char prop[PROPERTY_VALUE_MAX];
980 property_get("persist.camera.raw.debug.dump", prop, "0");
981 mRawDump = atoi(prop);
982 }
983
~QCamera3RawChannel()984 QCamera3RawChannel::~QCamera3RawChannel()
985 {
986 }
987
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)988 void QCamera3RawChannel::streamCbRoutine(
989 mm_camera_super_buf_t *super_frame,
990 QCamera3Stream * stream)
991 {
992 ATRACE_CALL();
993 /* Move this back down once verified */
994 if (mRawDump)
995 dumpRawSnapshot(super_frame->bufs[0]);
996
997 if (mIsRaw16) {
998 if (RAW_FORMAT == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
999 convertMipiToRaw16(super_frame->bufs[0]);
1000 else
1001 convertLegacyToRaw16(super_frame->bufs[0]);
1002 }
1003
1004 //Make sure cache coherence because extra processing is done
1005 mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
1006
1007 QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
1008 return;
1009 }
1010
dumpRawSnapshot(mm_camera_buf_def_t * frame)1011 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
1012 {
1013 QCamera3Stream *stream = getStreamByIndex(0);
1014 char buf[32];
1015 memset(buf, 0, sizeof(buf));
1016 cam_dimension_t dim;
1017 memset(&dim, 0, sizeof(dim));
1018 stream->getFrameDimension(dim);
1019
1020 cam_frame_len_offset_t offset;
1021 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1022 stream->getFrameOffset(offset);
1023 snprintf(buf, sizeof(buf), "/data/local/tmp/r_%d_%dx%d.raw",
1024 frame->frame_idx, dim.width, dim.height);
1025
1026 int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
1027 if (file_fd >= 0) {
1028 int written_len = write(file_fd, frame->buffer, offset.frame_len);
1029 ALOGE("%s: written number of bytes %d", __func__, written_len);
1030 close(file_fd);
1031 } else {
1032 ALOGE("%s: failed to open file to dump image", __func__);
1033 }
1034
1035 }
1036
/*===========================================================================
 * FUNCTION   : convertLegacyToRaw16
 *
 * DESCRIPTION: In-place conversion of a legacy opaque RAW10 frame into the
 *              cross-platform RAW16 layout. The traversal order (bottom-right
 *              to top-left) is essential: RAW16 occupies more bytes per row,
 *              so writing from the end avoids clobbering unread source data.
 *
 * PARAMETERS :
 *   @frame : frame buffer holding opaque RAW10 data; rewritten as RAW16
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
{
    // Convert image buffer from Opaque raw format to RAW16 format
    // 10bit Opaque raw is stored in the format of:
    // 0000 - p5 - p4 - p3 - p2 - p1 - p0
    // where p0 to p5 are 6 pixels (each is 10bit)_and most significant
    // 4 bits are 0s. Each 64bit word contains 6 pixels.

    QCamera3Stream *stream = getStreamByIndex(0);
    cam_dimension_t dim;
    memset(&dim, 0, sizeof(dim));
    stream->getFrameDimension(dim);

    cam_frame_len_offset_t offset;
    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
    stream->getFrameOffset(offset);

    // RAW16 stride is width rounded up to a multiple of 16 pixels.
    uint32_t raw16_stride = (dim.width + 15) & ~15;
    uint16_t* raw16_buffer = (uint16_t *)frame->buffer;

    // In-place format conversion.
    // Raw16 format always occupy more memory than opaque raw10.
    // Convert to Raw16 by iterating through all pixels from bottom-right
    // to top-left of the image.
    // One special notes:
    // 1. Cross-platform raw16's stride is 16 pixels.
    // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
    for (int y = dim.height-1; y >= 0; y--) {
        // offset.mp[0].stride is in bytes; /8 indexes 64-bit words.
        uint64_t* row_start = (uint64_t *)frame->buffer +
            y * offset.mp[0].stride / 8;
        for (int x = dim.width-1; x >= 0; x--) {
            // Word x/6 packs 6 pixels; pixel x sits at bit 10*(x%6),
            // 10 bits wide (mask 0x3FF).
            uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
            raw16_buffer[y*raw16_stride+x] = raw16_pixel;
        }
    }
}
1073
convertMipiToRaw16(mm_camera_buf_def_t * frame)1074 void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
1075 {
1076 // Convert image buffer from mipi10 raw format to RAW16 format
1077 // mipi10 opaque raw is stored in the format of:
1078 // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
1079 // 4 pixels occupy 5 bytes, no padding needed
1080
1081 QCamera3Stream *stream = getStreamByIndex(0);
1082 cam_dimension_t dim;
1083 memset(&dim, 0, sizeof(dim));
1084 stream->getFrameDimension(dim);
1085
1086 cam_frame_len_offset_t offset;
1087 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1088 stream->getFrameOffset(offset);
1089
1090 uint32_t raw16_stride = (dim.width + 15) & ~15;
1091 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
1092
1093 // In-place format conversion.
1094 // Raw16 format always occupy more memory than opaque raw10.
1095 // Convert to Raw16 by iterating through all pixels from bottom-right
1096 // to top-left of the image.
1097 // One special notes:
1098 // 1. Cross-platform raw16's stride is 16 pixels.
1099 // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
1100 for (int y = dim.height-1; y >= 0; y--) {
1101 uint8_t* row_start = (uint8_t *)frame->buffer +
1102 y * offset.mp[0].stride;
1103 for (int x = dim.width-1; x >= 0; x--) {
1104 uint8_t upper_8bit = row_start[5*(x/4)+x%4];
1105 uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> (x%4)) & 0x3);
1106 uint16_t raw16_pixel = (((uint16_t)upper_8bit)<<2 | lower_2bit);
1107 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
1108 }
1109 }
1110
1111 }
1112
1113
/*************************************************************************************/
// RAW Dump Channel related functions

// Number of heap buffers allocated for the internal RAW dump stream.
int QCamera3RawDumpChannel::kMaxBuffers = 3;
1118 /*===========================================================================
1119 * FUNCTION : QCamera3RawDumpChannel
1120 *
1121 * DESCRIPTION: Constructor for RawDumpChannel
1122 *
1123 * PARAMETERS :
1124 * @cam_handle : Handle for Camera
1125 * @cam_ops : Function pointer table
1126 * @rawDumpSize : Dimensions for the Raw stream
1127 * @paddinginfo : Padding information for stream
1128 * @userData : Cookie for parent
1129 * @pp mask : PP feature mask for this stream
1130 *
1131 * RETURN : NA
1132 *==========================================================================*/
QCamera3RawDumpChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,cam_dimension_t rawDumpSize,cam_padding_info_t * paddingInfo,void * userData,uint32_t postprocess_mask)1133 QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
1134 mm_camera_ops_t *cam_ops,
1135 cam_dimension_t rawDumpSize,
1136 cam_padding_info_t *paddingInfo,
1137 void *userData,
1138 uint32_t postprocess_mask) :
1139 QCamera3Channel(cam_handle, cam_ops, NULL,
1140 paddingInfo, postprocess_mask, userData),
1141 mDim(rawDumpSize),
1142 mMemory(NULL)
1143 {
1144 char prop[PROPERTY_VALUE_MAX];
1145 property_get("persist.camera.raw.dump", prop, "0");
1146 mRawDump = atoi(prop);
1147 }
1148
1149 /*===========================================================================
1150 * FUNCTION : QCamera3RawDumpChannel
1151 *
1152 * DESCRIPTION: Destructor for RawDumpChannel
1153 *
1154 * PARAMETERS :
1155 *
1156 * RETURN : NA
1157 *==========================================================================*/
1158
// Destructor: nothing channel-specific to free; the base class tears down
// the underlying stream resources.
QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
{
}
1162
1163 /*===========================================================================
1164 * FUNCTION : dumpRawSnapshot
1165 *
1166 * DESCRIPTION: Helper function to dump Raw frames
1167 *
1168 * PARAMETERS :
1169 * @frame : stream buf frame to be dumped
1170 *
1171 * RETURN : NA
1172 *==========================================================================*/
dumpRawSnapshot(mm_camera_buf_def_t * frame)1173 void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
1174 {
1175 QCamera3Stream *stream = getStreamByIndex(0);
1176 char buf[128];
1177 struct timeval tv;
1178 struct tm *timeinfo;
1179
1180 cam_dimension_t dim;
1181 memset(&dim, 0, sizeof(dim));
1182 stream->getFrameDimension(dim);
1183
1184 cam_frame_len_offset_t offset;
1185 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1186 stream->getFrameOffset(offset);
1187
1188 gettimeofday(&tv, NULL);
1189 timeinfo = localtime(&tv.tv_sec);
1190
1191 memset(buf, 0, sizeof(buf));
1192 snprintf(buf, sizeof(buf),
1193 "/data/%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
1194 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
1195 timeinfo->tm_mday, timeinfo->tm_hour,
1196 timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
1197 frame->frame_idx, dim.width, dim.height);
1198
1199 int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
1200 if (file_fd >= 0) {
1201 int written_len = write(file_fd, frame->buffer, offset.frame_len);
1202 CDBG("%s: written number of bytes %d", __func__, written_len);
1203 close(file_fd);
1204 } else {
1205 ALOGE("%s: failed to open file to dump image", __func__);
1206 }
1207 }
1208
1209 /*===========================================================================
1210 * FUNCTION : streamCbRoutine
1211 *
1212 * DESCRIPTION: Callback routine invoked for each frame generated for
1213 * Rawdump channel
1214 *
1215 * PARAMETERS :
1216 * @super_frame : stream buf frame generated
1217 * @stream : Underlying Stream object cookie
1218 *
1219 * RETURN : NA
1220 *==========================================================================*/
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)1221 void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1222 QCamera3Stream *stream)
1223 {
1224 CDBG("%s: E",__func__);
1225 if (super_frame == NULL || super_frame->num_bufs != 1) {
1226 ALOGE("%s: super_frame is not valid", __func__);
1227 return;
1228 }
1229
1230 if (mRawDump)
1231 dumpRawSnapshot(super_frame->bufs[0]);
1232
1233 bufDone(super_frame);
1234 free(super_frame);
1235 }
1236
1237 /*===========================================================================
1238 * FUNCTION : getStreamBufs
1239 *
1240 * DESCRIPTION: Callback function provided to interface to get buffers.
1241 *
1242 * PARAMETERS :
1243 * @len : Length of each buffer to be allocated
1244 *
1245 * RETURN : NULL on buffer allocation failure
1246 * QCamera3Memory object on sucess
1247 *==========================================================================*/
getStreamBufs(uint32_t len)1248 QCamera3Memory* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
1249 {
1250 int rc;
1251 mMemory = new QCamera3HeapMemory();
1252
1253 if (!mMemory) {
1254 ALOGE("%s: unable to create heap memory", __func__);
1255 return NULL;
1256 }
1257 rc = mMemory->allocate(kMaxBuffers, len, true);
1258 if (rc < 0) {
1259 ALOGE("%s: unable to allocate heap memory", __func__);
1260 delete mMemory;
1261 mMemory = NULL;
1262 return NULL;
1263 }
1264 return mMemory;
1265 }
1266
1267 /*===========================================================================
1268 * FUNCTION : putStreamBufs
1269 *
1270 * DESCRIPTION: Callback function provided to interface to return buffers.
1271 * Although no handles are actually returned, implicitl assumption
1272 * that interface will no longer use buffers and channel can
1273 * deallocated if necessary.
1274 *
1275 * PARAMETERS : NA
1276 *
1277 * RETURN : NA
1278 *==========================================================================*/
putStreamBufs()1279 void QCamera3RawDumpChannel::putStreamBufs()
1280 {
1281 mMemory->deallocate();
1282 delete mMemory;
1283 mMemory = NULL;
1284 }
1285
1286 /*===========================================================================
1287 * FUNCTION : request
1288 *
1289 * DESCRIPTION: Request function used as trigger
1290 *
1291 * PARAMETERS :
1292 * @recvd_frame : buffer- this will be NULL since this is internal channel
1293 * @frameNumber : Undefined again since this is internal stream
1294 *
1295 * RETURN : int32_t type of status
1296 * NO_ERROR -- success
1297 * none-zero failure code
1298 *==========================================================================*/
request(buffer_handle_t *,uint32_t)1299 int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
1300 uint32_t /*frameNumber*/)
1301 {
1302 if (!m_bIsActive) {
1303 return QCamera3Channel::start();
1304 }
1305 else
1306 return 0;
1307 }
1308
1309 /*===========================================================================
1310 * FUNCTION : intialize
1311 *
1312 * DESCRIPTION: Initializes channel params and creates underlying stream
1313 *
1314 * PARAMETERS : NA
1315 *
1316 * RETURN : int32_t type of status
1317 * NO_ERROR -- success
1318 * none-zero failure code
1319 *==========================================================================*/
initialize(cam_is_type_t isType,uint8_t)1320 int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType,
1321 uint8_t /*intent*/)
1322 {
1323 int32_t rc;
1324
1325 rc = init(NULL, NULL);
1326 if (rc < 0) {
1327 ALOGE("%s: init failed", __func__);
1328 return rc;
1329 }
1330 mIsType = isType;
1331 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
1332 CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, kMaxBuffers,
1333 mPostProcMask, mIsType);
1334 if (rc < 0) {
1335 ALOGE("%s: addStream failed", __func__);
1336 }
1337 return rc;
1338 }
1339 /*************************************************************************************/
1340
1341 /*===========================================================================
1342 * FUNCTION : jpegEvtHandle
1343 *
1344 * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events.
1345 Construct result payload and call mChannelCb to deliver buffer
1346 to framework.
1347 *
1348 * PARAMETERS :
1349 * @status : status of jpeg job
1350 * @client_hdl: jpeg client handle
1351 * @jobId : jpeg job Id
1352 * @p_ouput : ptr to jpeg output result struct
1353 * @userdata : user data ptr
1354 *
1355 * RETURN : none
1356 *==========================================================================*/
jpegEvtHandle(jpeg_job_status_t status,uint32_t,uint32_t jobId,mm_jpeg_output_t * p_output,void * userdata)1357 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
1358 uint32_t /*client_hdl*/,
1359 uint32_t jobId,
1360 mm_jpeg_output_t *p_output,
1361 void *userdata)
1362 {
1363 ATRACE_CALL();
1364 buffer_handle_t *resultBuffer, *jpegBufferHandle;
1365 int32_t resultFrameNumber;
1366 int resultStatus = CAMERA3_BUFFER_STATUS_OK;
1367 camera3_stream_buffer_t result;
1368 camera3_jpeg_blob_t jpegHeader;
1369 char* jpeg_eof = 0;
1370 int maxJpegSize;
1371 int32_t bufIdx;
1372
1373 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
1374 if (obj) {
1375 //Construct payload for process_capture_result. Call mChannelCb
1376
1377 qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
1378
1379 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
1380 ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
1381 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
1382 }
1383
1384 bufIdx = job->jpeg_settings->out_buf_index;
1385 CDBG("%s: jpeg out_buf_index: %d", __func__, bufIdx);
1386
1387 //Construct jpeg transient header of type camera3_jpeg_blob_t
1388 //Append at the end of jpeg image of buf_filled_len size
1389
1390 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
1391 jpegHeader.jpeg_size = p_output->buf_filled_len;
1392
1393
1394 char* jpeg_buf = (char *)p_output->buf_vaddr;
1395
1396 // Gralloc buffer may have additional padding for 4K page size
1397 // Follow size guidelines based on spec since framework relies
1398 // on that to reach end of buffer and with it the header
1399
1400 //Handle same as resultBuffer, but for readablity
1401 jpegBufferHandle =
1402 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
1403
1404 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
1405 if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
1406 maxJpegSize = obj->mMemory.getSize(bufIdx);
1407 }
1408
1409 jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
1410 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
1411 obj->mMemory.cleanInvalidateCache(bufIdx);
1412
1413 ////Use below data to issue framework callback
1414 resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
1415 resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
1416
1417 result.stream = obj->mCamera3Stream;
1418 result.buffer = resultBuffer;
1419 result.status = resultStatus;
1420 result.acquire_fence = -1;
1421 result.release_fence = -1;
1422
1423 // Release any snapshot buffers before calling
1424 // the user callback. The callback can potentially
1425 // unblock pending requests to snapshot stream.
1426 if (NULL != job) {
1427 int32_t snapshotIdx = -1;
1428 mm_camera_super_buf_t* src_frame = NULL;
1429
1430 if (job->src_reproc_frame)
1431 src_frame = job->src_reproc_frame;
1432 else
1433 src_frame = job->src_frame;
1434
1435 if (src_frame) {
1436 if (obj->mStreams[0]->getMyHandle() ==
1437 src_frame->bufs[0]->stream_id) {
1438 snapshotIdx = src_frame->bufs[0]->buf_idx;
1439 } else {
1440 ALOGE("%s: Snapshot stream id %d and source frame %d don't match!",
1441 __func__, obj->mStreams[0]->getMyHandle(),
1442 src_frame->bufs[0]->stream_id);
1443 }
1444 }
1445 if (0 <= snapshotIdx) {
1446 Mutex::Autolock lock(obj->mFreeBuffersLock);
1447 obj->mFreeBufferList.push_back(snapshotIdx);
1448 } else {
1449 ALOGE("%s: Snapshot buffer not found!", __func__);
1450 }
1451 }
1452
1453 CDBG("%s: Issue Callback", __func__);
1454 obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
1455
1456 // release internal data for jpeg job
1457 if (job != NULL) {
1458 if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
1459 obj->mOfflineMetaMemory.deallocate();
1460 obj->mOfflineMemory.unregisterBuffers();
1461 }
1462 obj->m_postprocessor.releaseOfflineBuffers();
1463 obj->m_postprocessor.releaseJpegJobData(job);
1464 free(job);
1465 }
1466
1467 return;
1468 // }
1469 } else {
1470 ALOGE("%s: Null userdata in jpeg callback", __func__);
1471 }
1472 }
1473
/*===========================================================================
 * FUNCTION   : QCamera3PicChannel
 *
 * DESCRIPTION: Constructor for the picture (snapshot) channel. Sets up the
 *              YUV snapshot stream parameters and initializes the JPEG
 *              postprocessor with this channel's gralloc memory and the
 *              jpegEvtHandle callback.
 *
 * PARAMETERS :
 *   @cam_handle       : camera handle
 *   @cam_ops          : mm-camera-interface ops table
 *   @cb_routine       : channel callback into the HAL
 *   @paddingInfo      : stream padding requirements
 *   @userData         : cookie (QCamera3HardwareInterface*)
 *   @stream           : framework blob stream this channel serves
 *   @postprocess_mask : PP feature mask
 *   @is4KVideo        : true when snapshotting during 4K video
 *   @metadataChannel  : companion metadata channel
 *
 * RETURN     : NA
 *==========================================================================*/
QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
                    mm_camera_ops_t *cam_ops,
                    channel_cb_routine cb_routine,
                    cam_padding_info_t *paddingInfo,
                    void *userData,
                    camera3_stream_t *stream,
                    uint32_t postprocess_mask,
                    bool is4KVideo,
                    QCamera3Channel *metadataChannel) :
                        QCamera3Channel(cam_handle, cam_ops, cb_routine,
                        paddingInfo, postprocess_mask, userData),
                        m_postprocessor(this),
                        mCamera3Stream(stream),
                        mNumBufsRegistered(CAM_MAX_NUM_BUFS_PER_STREAM),
                        mNumSnapshotBufs(0),
                        mCurrentBufIndex(-1),
                        mPostProcStarted(false),
                        mInputBufferConfig(false),
                        mYuvMemory(NULL),
                        m_pMetaChannel(metadataChannel),
                        mMetaFrame(NULL)
{
    // Max JPEG dimension comes from the HAL's capability query.
    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
    m_max_pic_dim = hal_obj->calcMaxJpegDim();
    mYuvWidth = stream->width;
    mYuvHeight = stream->height;
    // Use same pixelformat for 4K video case
    mStreamFormat = is4KVideo ? VIDEO_FORMAT : SNAPSHOT_FORMAT;
    mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
    // mMemory (gralloc JPEG output) is the postprocessor's destination;
    // jpegEvtHandle delivers the finished JPEG to the framework.
    int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, mPostProcMask,
            this);
    if (rc != 0) {
        ALOGE("Init Postprocessor failed");
    }
}
1509
1510 /*===========================================================================
1511 * FUNCTION : stop
1512 *
1513 * DESCRIPTION: stop pic channel, which will stop all streams within, including
1514 * the reprocessing channel in postprocessor and YUV stream.
1515 *
1516 * PARAMETERS : none
1517 *
1518 * RETURN : int32_t type of status
1519 * NO_ERROR -- success
1520 * none-zero failure code
1521 *==========================================================================*/
stop()1522 int32_t QCamera3PicChannel::stop()
1523 {
1524 int32_t rc = NO_ERROR;
1525 if(!m_bIsActive) {
1526 ALOGE("%s: Attempt to stop inactive channel",__func__);
1527 return rc;
1528 }
1529
1530 m_postprocessor.stop();
1531 mPostProcStarted = false;
1532 rc |= QCamera3Channel::stop();
1533 return rc;
1534 }
1535
~QCamera3PicChannel()1536 QCamera3PicChannel::~QCamera3PicChannel()
1537 {
1538 stop();
1539
1540 int32_t rc = m_postprocessor.deinit();
1541 if (rc != 0) {
1542 ALOGE("De-init Postprocessor failed");
1543 }
1544
1545 if (0 < mOfflineMetaMemory.getCnt()) {
1546 mOfflineMetaMemory.deallocate();
1547 }
1548 if (0 < mOfflineMemory.getCnt()) {
1549 mOfflineMemory.unregisterBuffers();
1550 }
1551 }
1552
initialize(cam_is_type_t isType,uint8_t intent)1553 int32_t QCamera3PicChannel::initialize(cam_is_type_t isType,
1554 uint8_t intent)
1555 {
1556 int32_t rc = NO_ERROR;
1557 cam_dimension_t streamDim;
1558 cam_stream_type_t streamType;
1559 cam_format_t streamFormat;
1560 mm_camera_channel_attr_t attr;
1561
1562 if (NULL == mCamera3Stream) {
1563 ALOGE("%s: Camera stream uninitialized", __func__);
1564 return NO_INIT;
1565 }
1566
1567 if (1 <= m_numStreams) {
1568 // Only one stream per channel supported in v3 Hal
1569 return NO_ERROR;
1570 }
1571
1572 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
1573 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
1574 attr.look_back = 1;
1575 attr.post_frame_skip = 1;
1576 attr.water_mark = 1;
1577 attr.max_unmatched_frames = 1;
1578
1579 rc = init(&attr, NULL);
1580 if (rc < 0) {
1581 ALOGE("%s: init failed", __func__);
1582 return rc;
1583 }
1584 mIsType = isType;
1585 mIntent = intent;
1586 mMemory.setColorSpace(mIntent);
1587
1588 streamType = mStreamType;
1589 streamFormat = mStreamFormat;
1590 streamDim.width = mYuvWidth;
1591 streamDim.height = mYuvHeight;
1592
1593 mNumSnapshotBufs = mCamera3Stream->max_buffers;
1594 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
1595 (uint8_t)mCamera3Stream->max_buffers, mPostProcMask, mIsType);
1596
1597 Mutex::Autolock lock(mFreeBuffersLock);
1598 mFreeBufferList.clear();
1599 for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
1600 mFreeBufferList.push_back(i);
1601 }
1602
1603 return rc;
1604 }
1605
/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: Issues a snapshot request. Builds the reprocess config,
 *              registers the output buffer on the fly if needed, starts
 *              (or restarts) the postprocessor, queues JPEG settings, and
 *              either feeds a stream buffer (normal capture) or wraps the
 *              framework-provided input buffer for offline reprocess.
 *
 * PARAMETERS :
 *   @buffer       : framework output (blob) buffer
 *   @frameNumber  : framework frame number for this request
 *   @pInputBuffer : optional framework input buffer (reprocess path)
 *   @metadata     : request metadata (rotation, JPEG params, ...)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
        uint32_t frameNumber,
        camera3_stream_buffer_t *pInputBuffer,
        metadata_buffer_t *metadata)
{
    ATRACE_CALL();
    //FIX ME: Return buffer back in case of failures below.

    int32_t rc = NO_ERROR;
    int index;
    //extract rotation information

    reprocess_config_t reproc_cfg;
    memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
    reproc_cfg.padding = mPaddingInfo;
    //to ensure a big enough buffer size set the height and width
    //padding to max(height padding, width padding)
    // NOTE(review): this writes through mPaddingInfo and so mutates the
    // channel-wide padding state, not just this request's copy —
    // presumably intentional (monotonically grows padding); confirm.
    if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
       reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
    } else {
       reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
    }

    reproc_cfg.input_stream_dim.width = mYuvWidth;
    reproc_cfg.input_stream_dim.height = mYuvHeight;
    if (NULL == pInputBuffer)
        reproc_cfg.src_channel = this;

    reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
    reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
    reproc_cfg.stream_type = mStreamType;
    reproc_cfg.stream_format = mStreamFormat;
    rc = mm_stream_calc_offset_snapshot(mStreamFormat, &reproc_cfg.input_stream_dim,
            reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
    if (rc != 0) {
        ALOGE("%s: Snapshot stream plane info calculation failed!", __func__);
        return rc;
    }
    // Map the framework's JPEG orientation (degrees) to the HAL rotation enum.
    if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
        int32_t *rotation = (int32_t *)POINTER_OF_PARAM(
                CAM_INTF_META_JPEG_ORIENTATION, metadata);
        if (*rotation == 0) {
            reproc_cfg.rotation = ROTATE_0;
        } else if (*rotation == 90) {
            reproc_cfg.rotation = ROTATE_90;
        } else if (*rotation == 180) {
            reproc_cfg.rotation = ROTATE_180;
        } else if (*rotation == 270) {
            reproc_cfg.rotation = ROTATE_270;
        }
    }

    // Picture stream has already been started before any request comes in
    if (!m_bIsActive) {
        ALOGE("%s: Channel not started!!", __func__);
        return NO_INIT;
    }

    // Register the framework output buffer on the fly if not seen before.
    index = mMemory.getMatchBufIndex((void*)buffer);
    if(index < 0) {
        rc = registerBuffer(buffer, mIsType, mIntent);
        if (NO_ERROR != rc) {
            ALOGE("%s: On-the-fly buffer registration failed %d",
                    __func__, rc);
            return rc;
        }

        index = mMemory.getMatchBufIndex((void*)buffer);
        if (index < 0) {
            ALOGE("%s: Could not find object among registered buffers",__func__);
            return DEAD_OBJECT;
        }
    }
    CDBG("%s: buffer index %d, frameNumber: %u", __func__, index, frameNumber);

    rc = mMemory.markFrameNumber(index, frameNumber);

    //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
    mCurrentBufIndex = index;

    // Start postprocessor
    // This component needs to be re-configured
    // once we switch from input(framework) buffer
    // reprocess to standard capture!
    bool restartNeeded = ((!mInputBufferConfig) != (NULL != pInputBuffer));
    if((!mPostProcStarted) || restartNeeded) {
        m_postprocessor.start(reproc_cfg, metadata);
        mPostProcStarted = true;
        mInputBufferConfig = (NULL == pInputBuffer);
    }

    // Queue jpeg settings
    rc = queueJpegSetting(index, metadata);

    if (pInputBuffer == NULL) {
        // Normal capture: hand one free snapshot buffer back to the stream
        // so the ISP can fill it for this request.
        Mutex::Autolock lock(mFreeBuffersLock);
        if (!mFreeBufferList.empty()) {
            List<uint32_t>::iterator it = mFreeBufferList.begin();
            uint32_t freeBuffer = *it;
            mStreams[0]->bufDone(freeBuffer);
            mFreeBufferList.erase(it);
        } else {
            ALOGE("%s: No snapshot buffers available!", __func__);
            rc = NOT_ENOUGH_DATA;
        }
    } else {
        // Reprocess: wrap the framework input buffer + a copy of the
        // metadata into an offline frame for the postprocessor.
        if (0 < mOfflineMetaMemory.getCnt()) {
            mOfflineMetaMemory.deallocate();
        }
        if (0 < mOfflineMemory.getCnt()) {
            mOfflineMemory.unregisterBuffers();
        }

        int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
        if(input_index < 0) {
            rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer);
            if (NO_ERROR != rc) {
                ALOGE("%s: On-the-fly input buffer registration failed %d",
                        __func__, rc);
                return rc;
            }

            input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
            if (input_index < 0) {
                ALOGE("%s: Could not find object among registered buffers",__func__);
                return DEAD_OBJECT;
            }
        }
        qcamera_fwk_input_pp_data_t *src_frame = NULL;
        src_frame = (qcamera_fwk_input_pp_data_t *)malloc(
                sizeof(qcamera_fwk_input_pp_data_t));
        if (src_frame == NULL) {
            ALOGE("%s: No memory for src frame", __func__);
            return NO_MEMORY;
        }
        memset(src_frame, 0, sizeof(qcamera_fwk_input_pp_data_t));
        src_frame->src_frame = *pInputBuffer;
        rc = mOfflineMemory.getBufDef(reproc_cfg.input_stream_plane_info.plane_info,
                src_frame->input_buffer, input_index);
        if (rc != 0) {
            free(src_frame);
            return rc;
        }
        if (mYUVDump) {
           dumpYUV(&src_frame->input_buffer, reproc_cfg.input_stream_dim,
                   reproc_cfg.input_stream_plane_info.plane_info, 1);
        }
        // Offline metadata buffer: one metadata_buffer_t copied from the
        // request so the postprocessor owns a stable snapshot of it.
        cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
        cam_stream_buf_plane_info_t meta_planes;
        rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
        if (rc != 0) {
            ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
            free(src_frame);
            return rc;
        }

        rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
        if (NO_ERROR != rc) {
            ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
            free(src_frame);
            return rc;
        }
        mm_camera_buf_def_t meta_buf;
        cam_frame_len_offset_t offset = meta_planes.plane_info;
        rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
        if (NO_ERROR != rc) {
            free(src_frame);
            return rc;
        }
        memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
        src_frame->metadata_buffer = meta_buf;
        src_frame->reproc_config = reproc_cfg;

        CDBG_HIGH("%s: Post-process started", __func__);
        CDBG_HIGH("%s: Issue call to reprocess", __func__);

        m_postprocessor.processData(src_frame);
    }
    return rc;
}
1786
1787
1788 /*===========================================================================
1789 * FUNCTION : metadataBufDone
1790 *
1791 * DESCRIPTION: Buffer done method for a metadata buffer
1792 *
1793 * PARAMETERS :
1794 * @recvd_frame : received metadata frame
1795 *
1796 * RETURN : int32_t type of status
1797 * NO_ERROR -- success
1798 * none-zero failure code
1799 *==========================================================================*/
metadataBufDone(mm_camera_super_buf_t * recvd_frame)1800 int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
1801 {
1802 int32_t rc = NO_ERROR;;
1803 if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
1804 ALOGE("%s: Metadata channel or metadata buffer invalid", __func__);
1805 return BAD_VALUE;
1806 }
1807
1808 rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
1809
1810 return rc;
1811 }
1812
1813 /*===========================================================================
1814 * FUNCTION : dataNotifyCB
1815 *
1816 * DESCRIPTION: Channel Level callback used for super buffer data notify.
1817 * This function is registered with mm-camera-interface to handle
1818 * data notify
1819 *
1820 * PARAMETERS :
1821 * @recvd_frame : stream frame received
1822 * userdata : user data ptr
1823 *
1824 * RETURN : none
1825 *==========================================================================*/
dataNotifyCB(mm_camera_super_buf_t * recvd_frame,void * userdata)1826 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
1827 void *userdata)
1828 {
1829 ATRACE_CALL();
1830 CDBG("%s: E\n", __func__);
1831 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
1832
1833 if (channel == NULL) {
1834 ALOGE("%s: invalid channel pointer", __func__);
1835 return;
1836 }
1837
1838 if(channel->m_numStreams != 1) {
1839 ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
1840 return;
1841 }
1842
1843
1844 if(channel->mStreams[0] == NULL) {
1845 ALOGE("%s: Error: Invalid Stream object",__func__);
1846 return;
1847 }
1848
1849 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
1850
1851 CDBG("%s: X\n", __func__);
1852 return;
1853 }
1854
1855 /*===========================================================================
1856 * FUNCTION : registerBuffer
1857 *
1858 * DESCRIPTION: register streaming buffer to the channel object
1859 *
1860 * PARAMETERS :
1861 * @buffer : buffer to be registered
1862 *
1863 * RETURN : int32_t type of status
1864 * NO_ERROR -- success
1865 * none-zero failure code
1866 *==========================================================================*/
registerBuffer(buffer_handle_t * buffer,cam_is_type_t isType,uint8_t intent)1867 int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType,
1868 uint8_t intent)
1869 {
1870 int rc = 0;
1871 mIsType = isType;
1872 mIntent = intent;
1873 if ((uint32_t)mMemory.getCnt() > (mNumBufsRegistered - 1)) {
1874 ALOGE("%s: Trying to register more buffers than initially requested",
1875 __func__);
1876 return BAD_VALUE;
1877 }
1878
1879 if (0 == m_numStreams) {
1880 rc = initialize(mIsType, intent);
1881 if (rc != NO_ERROR) {
1882 ALOGE("%s: Couldn't initialize camera stream %d",
1883 __func__, rc);
1884 return rc;
1885 }
1886 }
1887 rc = mMemory.registerBuffer(buffer);
1888 if (ALREADY_EXISTS == rc) {
1889 return NO_ERROR;
1890 } else if (NO_ERROR != rc) {
1891 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
1892 return rc;
1893 }
1894
1895 CDBG("%s: X",__func__);
1896
1897 return rc;
1898 }
1899
/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: stream callback for the snapshot (YUV) stream. Validates the
 *              incoming super buffer, optionally dumps the YUV frame, and
 *              forwards a heap copy of the super buffer descriptor to the
 *              post processor for JPEG encoding.
 *
 * PARAMETERS :
 *   @super_frame : received super buffer; must contain exactly one frame
 *   @stream      : stream object the buffer arrived on
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
        QCamera3Stream *stream)
{
    ATRACE_CALL();
    //TODO
    //Used only for getting YUV. Jpeg callback will be sent back from channel
    //directly to HWI. Refer to func jpegEvtHandle

    //Got the yuv callback. Calling yuv callback handler in PostProc
    uint8_t frameIndex;
    mm_camera_super_buf_t* frame = NULL;
    if(!super_frame) {
        ALOGE("%s: Invalid Super buffer",__func__);
        return;
    }

    if(super_frame->num_bufs != 1) {
        ALOGE("%s: Multiple streams are not supported",__func__);
        return;
    }
    if(super_frame->bufs[0] == NULL ) {
        ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
                __func__);
        return;
    }

    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
    CDBG("%s: recvd buf_idx: %u for further processing",
        __func__, (uint32_t)frameIndex);
    if(frameIndex >= mNumSnapshotBufs) {
         // Out-of-range index: return the buffer to the free list and to the
         // stream so it is not leaked.
         ALOGE("%s: Error, Invalid index for buffer",__func__);
         if(stream) {
             Mutex::Autolock lock(mFreeBuffersLock);
             mFreeBufferList.push_back(frameIndex);
             stream->bufDone(frameIndex);
         }
         return;
    }

    // The descriptor is copied to the heap because the post processor
    // consumes it asynchronously, after this callback returns.
    frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
       ALOGE("%s: Error allocating memory to save received_frame structure.",
                                        __func__);
       // Allocation failed: recycle the buffer instead of leaking it.
       if(stream) {
           Mutex::Autolock lock(mFreeBuffersLock);
           mFreeBufferList.push_back(frameIndex);
           stream->bufDone(frameIndex);
       }
       return;
    }
    *frame = *super_frame;

    // Optional debug dump of the raw YUV frame (type 1 = snapshot).
    if(mYUVDump) {
        cam_dimension_t dim;
        memset(&dim, 0, sizeof(dim));
        stream->getFrameDimension(dim);
        cam_frame_len_offset_t offset;
        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
        stream->getFrameOffset(offset);
        dumpYUV(frame->bufs[0], dim, offset, 1);
    }

    // Ownership of 'frame' passes to the post processor; only the incoming
    // descriptor (owned by the caller contract) is freed here.
    m_postprocessor.processData(frame);
    free(super_frame);
    return;
}
1966
getStreamBufs(uint32_t len)1967 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
1968 {
1969 int rc = 0;
1970
1971 mYuvMemory = new QCamera3HeapMemory();
1972 if (!mYuvMemory) {
1973 ALOGE("%s: unable to create metadata memory", __func__);
1974 return NULL;
1975 }
1976
1977 //Queue YUV buffers in the beginning mQueueAll = true
1978 rc = mYuvMemory->allocate(mCamera3Stream->max_buffers, len, false);
1979 if (rc < 0) {
1980 ALOGE("%s: unable to allocate metadata memory", __func__);
1981 delete mYuvMemory;
1982 mYuvMemory = NULL;
1983 return NULL;
1984 }
1985 return mYuvMemory;
1986 }
1987
putStreamBufs()1988 void QCamera3PicChannel::putStreamBufs()
1989 {
1990 mMemory.unregisterBuffers();
1991
1992 mYuvMemory->deallocate();
1993 delete mYuvMemory;
1994 mYuvMemory = NULL;
1995 }
1996
queueReprocMetadata(mm_camera_super_buf_t * metadata)1997 int32_t QCamera3PicChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
1998 {
1999 return m_postprocessor.processPPMetadata(metadata);
2000 }
2001
queueJpegSetting(int32_t index,metadata_buffer_t * metadata)2002 int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata)
2003 {
2004 jpeg_settings_t *settings =
2005 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
2006
2007 if (!settings) {
2008 ALOGE("%s: out of memory allocating jpeg_settings", __func__);
2009 return -ENOMEM;
2010 }
2011
2012 memset(settings, 0, sizeof(jpeg_settings_t));
2013
2014 settings->out_buf_index = index;
2015
2016 settings->jpeg_orientation = 0;
2017 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
2018 int32_t *orientation = (int32_t *)POINTER_OF_PARAM(
2019 CAM_INTF_META_JPEG_ORIENTATION, metadata);
2020 settings->jpeg_orientation = *orientation;
2021 }
2022
2023 settings->jpeg_quality = 85;
2024 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
2025 uint8_t *quality = (uint8_t *)POINTER_OF_PARAM(
2026 CAM_INTF_META_JPEG_QUALITY, metadata);
2027 settings->jpeg_quality = *quality;
2028 }
2029
2030 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
2031 uint8_t *quality = (uint8_t *)POINTER_OF_PARAM(
2032 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2033 settings->jpeg_thumb_quality = *quality;
2034 }
2035
2036 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
2037 cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF_PARAM(
2038 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2039 settings->thumbnail_size = *dimension;
2040 }
2041
2042 settings->gps_timestamp_valid = 0;
2043 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
2044 int64_t *timestamp = (int64_t *)POINTER_OF_PARAM(
2045 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2046 settings->gps_timestamp = *timestamp;
2047 settings->gps_timestamp_valid = 1;
2048 }
2049
2050 settings->gps_coordinates_valid = 0;
2051 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
2052 double *coordinates = (double *)POINTER_OF_PARAM(
2053 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2054 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
2055 settings->gps_coordinates_valid = 1;
2056 }
2057
2058 if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
2059 char *proc_methods = (char *)POINTER_OF_PARAM(
2060 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2061 memset(settings->gps_processing_method, 0,
2062 sizeof(settings->gps_processing_method));
2063 strncpy(settings->gps_processing_method, proc_methods,
2064 sizeof(settings->gps_processing_method));
2065 }
2066
2067 return m_postprocessor.processJpegSettingData(settings);
2068 }
2069
2070 /*===========================================================================
2071 * FUNCTION : getRational
2072 *
2073 * DESCRIPTION: compose rational struct
2074 *
2075 * PARAMETERS :
2076 * @rat : ptr to struct to store rational info
2077 * @num :num of the rational
2078 * @denom : denom of the rational
2079 *
2080 * RETURN : int32_t type of status
2081 * NO_ERROR -- success
2082 * none-zero failure code
2083 *==========================================================================*/
getRational(rat_t * rat,int num,int denom)2084 int32_t getRational(rat_t *rat, int num, int denom)
2085 {
2086 if (NULL == rat) {
2087 ALOGE("%s: NULL rat input", __func__);
2088 return BAD_VALUE;
2089 }
2090 rat->num = num;
2091 rat->denom = denom;
2092 return NO_ERROR;
2093 }
2094
2095 /*===========================================================================
2096 * FUNCTION : parseGPSCoordinate
2097 *
2098 * DESCRIPTION: parse GPS coordinate string
2099 *
2100 * PARAMETERS :
2101 * @coord_str : [input] coordinate string
2102 * @coord : [output] ptr to struct to store coordinate
2103 *
2104 * RETURN : int32_t type of status
2105 * NO_ERROR -- success
2106 * none-zero failure code
2107 *==========================================================================*/
parseGPSCoordinate(const char * coord_str,rat_t * coord)2108 int parseGPSCoordinate(const char *coord_str, rat_t* coord)
2109 {
2110 if(coord == NULL) {
2111 ALOGE("%s: error, invalid argument coord == NULL", __func__);
2112 return BAD_VALUE;
2113 }
2114 float degF = atof(coord_str);
2115 if (degF < 0) {
2116 degF = -degF;
2117 }
2118 float minF = (degF - (int) degF) * 60;
2119 float secF = (minF - (int) minF) * 60;
2120
2121 getRational(&coord[0], (int)degF, 1);
2122 getRational(&coord[1], (int)minF, 1);
2123 getRational(&coord[2], (int)(secF * 10000), 10000);
2124 return NO_ERROR;
2125 }
2126
2127 /*===========================================================================
2128 * FUNCTION : getExifDateTime
2129 *
2130 * DESCRIPTION: query exif date time
2131 *
2132 * PARAMETERS :
2133 * @dateTime : string to store exif date time
2134 * @subsecTime : string to store exif subsec time
2135 * @count : length of the dateTime string
2136 * @subsecCount: length of the subsecTime string
2137 *
2138 * RETURN : int32_t type of status
2139 * NO_ERROR -- success
2140 * none-zero failure code
2141 *==========================================================================*/
getExifDateTime(char * dateTime,char * subsecTime,uint32_t & count,uint32_t & subsecCount)2142 int32_t getExifDateTime(char *dateTime, char *subsecTime,
2143 uint32_t &count, uint32_t &subsecCount)
2144 {
2145 //get time and date from system
2146 struct timeval tv;
2147 struct tm *timeinfo;
2148
2149 gettimeofday(&tv, NULL);
2150 timeinfo = localtime(&tv.tv_sec);
2151 //Write datetime according to EXIF Spec
2152 //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
2153 snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
2154 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
2155 timeinfo->tm_mday, timeinfo->tm_hour,
2156 timeinfo->tm_min, timeinfo->tm_sec);
2157 count = 20;
2158
2159 //Write subsec according to EXIF Sepc
2160 snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
2161 subsecCount = 7;
2162 return NO_ERROR;
2163 }
2164
2165 /*===========================================================================
2166 * FUNCTION : getExifFocalLength
2167 *
2168 * DESCRIPTION: get exif focal lenght
2169 *
2170 * PARAMETERS :
2171 * @focalLength : ptr to rational strcut to store focal lenght
2172 *
2173 * RETURN : int32_t type of status
2174 * NO_ERROR -- success
2175 * none-zero failure code
2176 *==========================================================================*/
getExifFocalLength(rat_t * focalLength,float value)2177 int32_t getExifFocalLength(rat_t *focalLength, float value)
2178 {
2179 int focalLengthValue =
2180 (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
2181 return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
2182 }
2183
/*===========================================================================
 * FUNCTION   : getExifExpTimeInfo
 *
 * DESCRIPTION: get exif exposure time information as a rational 1/value
 *
 * PARAMETERS :
 *   @expoTimeInfo : ptr to rational struct to store the exposure time
 *   @value        : exposure time value; 0 falls back to a default of 1/60
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
{

    // NOTE(review): 'value' is silently narrowed from int64_t to int here;
    // values above INT_MAX would be truncated. Confirm the expected unit and
    // range of 'value' (caller passes CAM_INTF_META_SENSOR_EXPOSURE_TIME).
    int cal_exposureTime;
    if (value != 0)
        cal_exposureTime = value;
    else
        cal_exposureTime = 60;  // default exposure time of 1/60s when unset

    return getRational(expoTimeInfo, 1, cal_exposureTime);
}
2206
/*===========================================================================
 * FUNCTION   : getExifGpsProcessingMethod
 *
 * DESCRIPTION: build the EXIF GPS processing method entry: the EXIF ASCII
 *              prefix followed by the method string and a trailing NUL
 *
 * PARAMETERS :
 *   @gpsProcessingMethod : output buffer for the composed entry
 *   @count               : [output] total bytes written, including the NUL
 *   @value               : NUL-terminated GPS processing method string
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
        uint32_t &count, char* value)
{
    if(value != NULL) {
        memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
        count = EXIF_ASCII_PREFIX_SIZE;
        // NOTE(review): no bound on strlen(value) here — the caller's buffer
        // is EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE bytes, so
        // this relies on 'value' (jpeg_settings->gps_processing_method) being
        // NUL-terminated and short enough. Confirm the producer guarantees
        // termination (see queueJpegSetting's strncpy).
        strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
        count += strlen(value);
        gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
        return NO_ERROR;
    } else {
        return BAD_VALUE;
    }
}
2234
2235 /*===========================================================================
2236 * FUNCTION : getExifLatitude
2237 *
2238 * DESCRIPTION: get exif latitude
2239 *
2240 * PARAMETERS :
2241 * @latitude : ptr to rational struct to store latitude info
2242 * @ladRef : charater to indicate latitude reference
2243 *
2244 * RETURN : int32_t type of status
2245 * NO_ERROR -- success
2246 * none-zero failure code
2247 *==========================================================================*/
getExifLatitude(rat_t * latitude,char * latRef,double value)2248 int32_t getExifLatitude(rat_t *latitude,
2249 char *latRef, double value)
2250 {
2251 char str[30];
2252 snprintf(str, sizeof(str), "%f", value);
2253 if(str != NULL) {
2254 parseGPSCoordinate(str, latitude);
2255
2256 //set Latitude Ref
2257 float latitudeValue = strtof(str, 0);
2258 if(latitudeValue < 0.0f) {
2259 latRef[0] = 'S';
2260 } else {
2261 latRef[0] = 'N';
2262 }
2263 latRef[1] = '\0';
2264 return NO_ERROR;
2265 }else{
2266 return BAD_VALUE;
2267 }
2268 }
2269
2270 /*===========================================================================
2271 * FUNCTION : getExifLongitude
2272 *
2273 * DESCRIPTION: get exif longitude
2274 *
2275 * PARAMETERS :
2276 * @longitude : ptr to rational struct to store longitude info
2277 * @lonRef : charater to indicate longitude reference
2278 *
2279 * RETURN : int32_t type of status
2280 * NO_ERROR -- success
2281 * none-zero failure code
2282 *==========================================================================*/
getExifLongitude(rat_t * longitude,char * lonRef,double value)2283 int32_t getExifLongitude(rat_t *longitude,
2284 char *lonRef, double value)
2285 {
2286 char str[30];
2287 snprintf(str, sizeof(str), "%f", value);
2288 if(str != NULL) {
2289 parseGPSCoordinate(str, longitude);
2290
2291 //set Longitude Ref
2292 float longitudeValue = strtof(str, 0);
2293 if(longitudeValue < 0.0f) {
2294 lonRef[0] = 'W';
2295 } else {
2296 lonRef[0] = 'E';
2297 }
2298 lonRef[1] = '\0';
2299 return NO_ERROR;
2300 }else{
2301 return BAD_VALUE;
2302 }
2303 }
2304
2305 /*===========================================================================
2306 * FUNCTION : getExifAltitude
2307 *
2308 * DESCRIPTION: get exif altitude
2309 *
2310 * PARAMETERS :
2311 * @altitude : ptr to rational struct to store altitude info
2312 * @altRef : charater to indicate altitude reference
2313 *
2314 * RETURN : int32_t type of status
2315 * NO_ERROR -- success
2316 * none-zero failure code
2317 *==========================================================================*/
getExifAltitude(rat_t * altitude,char * altRef,double value)2318 int32_t getExifAltitude(rat_t *altitude,
2319 char *altRef, double value)
2320 {
2321 char str[30];
2322 snprintf(str, sizeof(str), "%f", value);
2323 if(str != NULL) {
2324 double value = atof(str);
2325 *altRef = 0;
2326 if(value < 0){
2327 *altRef = 1;
2328 value = -value;
2329 }
2330 return getRational(altitude, value*1000, 1000);
2331 }else{
2332 return BAD_VALUE;
2333 }
2334 }
2335
2336 /*===========================================================================
2337 * FUNCTION : getExifGpsDateTimeStamp
2338 *
2339 * DESCRIPTION: get exif GPS date time stamp
2340 *
2341 * PARAMETERS :
2342 * @gpsDateStamp : GPS date time stamp string
2343 * @bufLen : length of the string
2344 * @gpsTimeStamp : ptr to rational struct to store time stamp info
2345 *
2346 * RETURN : int32_t type of status
2347 * NO_ERROR -- success
2348 * none-zero failure code
2349 *==========================================================================*/
getExifGpsDateTimeStamp(char * gpsDateStamp,uint32_t bufLen,rat_t * gpsTimeStamp,int64_t value)2350 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
2351 uint32_t bufLen,
2352 rat_t *gpsTimeStamp, int64_t value)
2353 {
2354 char str[30];
2355 snprintf(str, sizeof(str), "%lld", value);
2356 if(str != NULL) {
2357 time_t unixTime = (time_t)atol(str);
2358 struct tm *UTCTimestamp = gmtime(&unixTime);
2359
2360 strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
2361
2362 getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
2363 getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
2364 getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
2365
2366 return NO_ERROR;
2367 } else {
2368 return BAD_VALUE;
2369 }
2370 }
2371
getExifExposureValue(srat_t * exposure_val,int32_t exposure_comp,cam_rational_type_t step)2372 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
2373 cam_rational_type_t step)
2374 {
2375 exposure_val->num = exposure_comp * step.numerator;
2376 exposure_val->denom = step.denominator;
2377 return 0;
2378 }
/*===========================================================================
 * FUNCTION   : getExifData
 *
 * DESCRIPTION: collect EXIF entries (date/time, lens, sensor, GPS, exposure
 *              bias, make/model) from the request metadata, the JPEG
 *              settings, and system properties, to be passed into jpeg
 *              encoding
 *
 * PARAMETERS :
 *   @metadata      : request metadata to read sensor/lens/EV values from
 *   @jpeg_settings : per-request JPEG settings (GPS data, quality, etc.)
 *
 * RETURN     : newly allocated QCamera3Exif with the collected entries
 *              (caller takes ownership); NULL on allocation failure
 *==========================================================================*/
QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
        jpeg_settings_t *jpeg_settings)
{
    QCamera3Exif *exif = new QCamera3Exif();
    if (exif == NULL) {
        ALOGE("%s: No memory for QCamera3Exif", __func__);
        return NULL;
    }

    int32_t rc = NO_ERROR;
    uint32_t count = 0;

    // add exif entries
    // Date/time block: the same strings are reused for the three date-time
    // tags and the three subsecond tags.
    {
        char dateTime[20];
        char subsecTime[7];
        uint32_t subsecCount;
        memset(dateTime, 0, sizeof(dateTime));
        memset(subsecTime, 0, sizeof(subsecTime));
        count = 20;
        subsecCount = 7;
        rc = getExifDateTime(dateTime, subsecTime, count, subsecCount);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_DATE_TIME,
                    EXIF_ASCII,
                    count,
                    (void *)dateTime);
            exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
                    EXIF_ASCII,
                    count,
                    (void *)dateTime);
            exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
                    EXIF_ASCII,
                    count,
                    (void *)dateTime);
            exif->addEntry(EXIFTAGID_SUBSEC_TIME,
                    EXIF_ASCII,
                    subsecCount,
                    (void *)subsecTime);
            exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
                    EXIF_ASCII,
                    subsecCount,
                    (void *)subsecTime);
            exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
                    EXIF_ASCII,
                    subsecCount,
                    (void *)subsecTime);
        } else {
            ALOGE("%s: getExifDateTime failed", __func__);
        }
    }

    if (IS_PARAM_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) {
        float focal_length = *(float *)POINTER_OF_PARAM(
                CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
        rat_t focalLength;
        rc = getExifFocalLength(&focalLength, focal_length);
        if (rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
                    EXIF_RATIONAL,
                    1,
                    (void *)&(focalLength));
        } else {
            ALOGE("%s: getExifFocalLength failed", __func__);
        }
    }

    if (IS_PARAM_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) {
        // NOTE(review): metadata value is int32_t but the EXIF_SHORT tag
        // needs 16 bits — ISO values above 32767 would be truncated here.
        int16_t isoSpeed = *(int32_t *)POINTER_OF_PARAM(
                CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
        exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
                EXIF_SHORT,
                1,
                (void *)&(isoSpeed));
    }

    if (IS_PARAM_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) {
        int64_t sensor_exposure_time = *(int64_t *)POINTER_OF_PARAM(
                CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
        rat_t sensorExpTime;
        rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time);
        if (rc == NO_ERROR){
            exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
                    EXIF_RATIONAL,
                    1,
                    (void *)&(sensorExpTime));
        } else {
            ALOGE("%s: getExifExpTimeInfo failed", __func__);
        }
    }

    // GPS entries are taken from the per-request jpeg_settings rather than
    // the metadata buffer.
    if (strlen(jpeg_settings->gps_processing_method) > 0) {
        char gpsProcessingMethod[
                EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
        count = 0;
        rc = getExifGpsProcessingMethod(gpsProcessingMethod,
                count, jpeg_settings->gps_processing_method);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
                    EXIF_ASCII,
                    count,
                    (void *)gpsProcessingMethod);
        } else {
            ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
        }
    }

    if (jpeg_settings->gps_coordinates_valid) {

        //latitude
        rat_t latitude[3];
        char latRef[2];
        rc = getExifLatitude(latitude, latRef,
                jpeg_settings->gps_coordinates[0]);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_GPS_LATITUDE,
                    EXIF_RATIONAL,
                    3,
                    (void *)latitude);
            exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
                    EXIF_ASCII,
                    2,
                    (void *)latRef);
        } else {
            ALOGE("%s: getExifLatitude failed", __func__);
        }

        //longitude
        rat_t longitude[3];
        char lonRef[2];
        rc = getExifLongitude(longitude, lonRef,
                jpeg_settings->gps_coordinates[1]);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
                    EXIF_RATIONAL,
                    3,
                    (void *)longitude);

            exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
                    EXIF_ASCII,
                    2,
                    (void *)lonRef);
        } else {
            ALOGE("%s: getExifLongitude failed", __func__);
        }

        //altitude
        rat_t altitude;
        char altRef;
        rc = getExifAltitude(&altitude, &altRef,
                jpeg_settings->gps_coordinates[2]);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
                    EXIF_RATIONAL,
                    1,
                    (void *)&(altitude));

            exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
                    EXIF_BYTE,
                    1,
                    (void *)&altRef);
        } else {
            ALOGE("%s: getExifAltitude failed", __func__);
        }
    }

    if (jpeg_settings->gps_timestamp_valid) {

        char gpsDateStamp[20];
        rat_t gpsTimeStamp[3];
        rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
                jpeg_settings->gps_timestamp);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
                    EXIF_ASCII,
                    strlen(gpsDateStamp) + 1,
                    (void *)gpsDateStamp);

            exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
                    EXIF_RATIONAL,
                    3,
                    (void *)gpsTimeStamp);
        } else {
            ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
        }
    }

    // Exposure bias requires both the compensation index and the EV step.
    if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) &&
            IS_PARAM_AVAILABLE(CAM_INTF_PARM_EV_STEP, metadata)) {
        int32_t exposure_comp = *(int32_t *)POINTER_OF_PARAM(
                CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
        cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF_PARAM(
                CAM_INTF_PARM_EV_STEP, metadata);
        srat_t exposure_val;
        rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step);
        if(rc == NO_ERROR) {
            exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
                    EXIF_SRATIONAL,
                    1,
                    (void *)(&exposure_val));
        } else {
            ALOGE("%s: getExifExposureValue failed ", __func__);
        }
    }

    // Make/model come from system properties, with QCOM defaults.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
        exif->addEntry(EXIFTAGID_MAKE,
                EXIF_ASCII,
                strlen(value) + 1,
                (void *)value);
    } else {
        ALOGE("%s: getExifMaker failed", __func__);
    }

    if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
        exif->addEntry(EXIFTAGID_MODEL,
                EXIF_ASCII,
                strlen(value) + 1,
                (void *)value);
    } else {
        ALOGE("%s: getExifModel failed", __func__);
    }

    return exif;
}
2614
/* Up to MAX_INFLIGHT_REQUESTS capture requests can be queued up at a time.
   Hence the picture channel allocates the same number of buffers. */
int QCamera3PicChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
2618
overrideYuvSize(uint32_t width,uint32_t height)2619 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
2620 {
2621 mYuvWidth = width;
2622 mYuvHeight = height;
2623 }
2624
/*===========================================================================
 * FUNCTION   : QCamera3ReprocessChannel
 *
 * DESCRIPTION: constructor of QCamera3ReprocessChannel
 *
 * PARAMETERS :
 *   @cam_handle       : camera handle
 *   @cam_ops          : ptr to camera ops table
 *   @cb_routine       : channel callback routine
 *   @paddingInfo      : padding information for the stream
 *   @postprocess_mask : post-proccess feature mask
 *   @userData         : user data pointer
 *   @ch_hdl           : handle of the (picture) channel that owns this one
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
        mm_camera_ops_t *cam_ops,
        channel_cb_routine cb_routine,
        cam_padding_info_t *paddingInfo,
        uint32_t postprocess_mask,
        void *userData, void *ch_hdl) :
    QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, postprocess_mask,
            userData),
    picChHandle(ch_hdl),
    mOfflineBuffersIndex(-1),
    m_pSrcChannel(NULL),
    m_pMetaChannel(NULL),
    mMemory(NULL)
{
    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
    // The last buffer index is reserved for the offline metadata buffer.
    mOfflineMetaIndex = MAX_INFLIGHT_REQUESTS -1;
}
2654
2655
/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: initialize the reprocess channel: create the mm-camera channel
 *              with continuous-notify attributes
 *
 * PARAMETERS :
 *   @isType : image stabilization type
 *   @intent : capture intent (unused by the reprocess channel)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType,
        uint8_t /*intent*/)
{
    int32_t rc = NO_ERROR;
    mm_camera_channel_attr_t attr;

    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
    attr.max_unmatched_frames = 1;

    rc = init(&attr, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
    }
    mIsType = isType;
    return rc;
}
2685
2686
/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: stream callback of the reprocess channel. Validates the
 *              post-processed super buffer, optionally dumps the YUV frame,
 *              and forwards a heap copy of the descriptor to the owning
 *              picture channel's post processor for JPEG encoding.
 *
 * PARAMETERS :
 *   @super_frame : received super buffer; must contain exactly one frame
 *   @stream      : stream object the buffer arrived on
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
        QCamera3Stream *stream)
{
    //Got the pproc data callback. Now send to jpeg encoding
    uint8_t frameIndex;
    mm_camera_super_buf_t* frame = NULL;
    // picChHandle was provided at construction; it identifies the picture
    // channel whose post processor consumes the reprocessed frame.
    QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;

    if(!super_frame) {
        ALOGE("%s: Invalid Super buffer",__func__);
        return;
    }

    if(super_frame->num_bufs != 1) {
        ALOGE("%s: Multiple streams are not supported",__func__);
        return;
    }
    if(super_frame->bufs[0] == NULL ) {
        ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
                __func__);
        return;
    }

    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
    // Heap copy of the descriptor: the post processor consumes it after this
    // callback returns.
    frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
       ALOGE("%s: Error allocating memory to save received_frame structure.",
                                        __func__);
       // Return the buffer to the stream so it is not leaked.
       if(stream) {
           stream->bufDone(frameIndex);
       }
       return;
    }
    CDBG("%s: bufIndex: %u recvd from post proc",
        __func__, (uint32_t)frameIndex);
    *frame = *super_frame;
    // Optional debug dump of the reprocessed frame (type 2 = reprocess).
    if(mYUVDump) {
        cam_dimension_t dim;
        memset(&dim, 0, sizeof(dim));
        stream->getFrameDimension(dim);
        cam_frame_len_offset_t offset;
        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
        stream->getFrameOffset(offset);
        dumpYUV(frame->bufs[0], dim, offset, 2);
    }
    obj->m_postprocessor.processPPData(frame);
    free(super_frame);
    return;
}
2748
/*===========================================================================
 * FUNCTION   : QCamera3ReprocessChannel
 *
 * DESCRIPTION: default constructor of QCamera3ReprocessChannel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
    m_pSrcChannel(NULL),
    m_pMetaChannel(NULL)
{
    // NOTE(review): unlike the main constructor, mMemory and picChHandle are
    // not initialized here — confirm this default constructor is never used
    // to create an operational channel.
}
2763
/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION: allocate the output buffers of the reprocess channel
 *              (the original header said "register", but this allocates)
 *
 * PARAMETERS :
 *   @len : size in bytes of a single stream buffer
 *
 * RETURN     : QCamera3Memory * -- allocated memory object; NULL on failure
 *==========================================================================*/
QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
{
   int rc = 0;

    mMemory = new QCamera3HeapMemory();
    if (!mMemory) {
        ALOGE("%s: unable to create reproc memory", __func__);
        return NULL;
    }

    //Queue YUV buffers in the beginning mQueueAll = true
    /* There can be MAX_INFLIGHT_REQUESTS number of requests that could get queued up.
     * Hence allocating same number of reprocess channel's output buffers */
    rc = mMemory->allocate(MAX_INFLIGHT_REQUESTS, len, true);
    if (rc < 0) {
        ALOGE("%s: unable to allocate reproc memory", __func__);
        delete mMemory;
        mMemory = NULL;
        return NULL;
    }
    return mMemory;
}
2795
2796 /*===========================================================================
2797 * FUNCTION : getStreamBufs
2798 *
2799 * DESCRIPTION: register the buffers of the reprocess channel
2800 *
2801 * PARAMETERS : none
2802 *
2803 * RETURN :
2804 *==========================================================================*/
putStreamBufs()2805 void QCamera3ReprocessChannel::putStreamBufs()
2806 {
2807 mMemory->deallocate();
2808 delete mMemory;
2809 mMemory = NULL;
2810 }
2811
/*===========================================================================
 * FUNCTION   : ~QCamera3ReprocessChannel
 *
 * DESCRIPTION: destructor of QCamera3ReprocessChannel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
{
    // Intentionally empty: buffer and stream cleanup is handled elsewhere
    // (see stop()/putStreamBufs()).
}
2824
2825 /*===========================================================================
2826 * FUNCTION : getStreamBySrcHandle
2827 *
2828 * DESCRIPTION: find reprocess stream by its source stream handle
2829 *
2830 * PARAMETERS :
2831 * @srcHandle : source stream handle
2832 *
2833 * RETURN : ptr to reprocess stream if found. NULL if not found
2834 *==========================================================================*/
getStreamBySrcHandle(uint32_t srcHandle)2835 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
2836 {
2837 QCamera3Stream *pStream = NULL;
2838
2839 for (int i = 0; i < m_numStreams; i++) {
2840 if (mSrcStreamHandles[i] == srcHandle) {
2841 pStream = mStreams[i];
2842 break;
2843 }
2844 }
2845 return pStream;
2846 }
2847
2848 /*===========================================================================
2849 * FUNCTION : getSrcStreamBySrcHandle
2850 *
2851 * DESCRIPTION: find source stream by source stream handle
2852 *
2853 * PARAMETERS :
2854 * @srcHandle : source stream handle
2855 *
2856 * RETURN : ptr to reprocess stream if found. NULL if not found
2857 *==========================================================================*/
getSrcStreamBySrcHandle(uint32_t srcHandle)2858 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
2859 {
2860 QCamera3Stream *pStream = NULL;
2861
2862 if (NULL == m_pSrcChannel) {
2863 return NULL;
2864 }
2865
2866 for (int i = 0; i < m_numStreams; i++) {
2867 if (mSrcStreamHandles[i] == srcHandle) {
2868 pStream = m_pSrcChannel->getStreamByIndex(i);
2869 break;
2870 }
2871 }
2872 return pStream;
2873 }
2874
2875 /*===========================================================================
2876 * FUNCTION : stop
2877 *
2878 * DESCRIPTION: stop channel
2879 *
2880 * PARAMETERS : none
2881 *
2882 * RETURN : int32_t type of status
2883 * NO_ERROR -- success
2884 * none-zero failure code
2885 *==========================================================================*/
stop()2886 int32_t QCamera3ReprocessChannel::stop()
2887 {
2888 unmapOfflineBuffers(true);
2889
2890 return QCamera3Channel::stop();
2891 }
2892
/*===========================================================================
 * FUNCTION   : unmapOfflineBuffers
 *
 * DESCRIPTION: Unmaps offline buffers
 *
 * PARAMETERS :
 *   @all : true to unmap every queued offline input and meta buffer;
 *          false to unmap only the oldest entry of each list
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *              (on multiple failures only the last unmap status is returned)
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
{
    int rc = NO_ERROR;
    // Offline input (frame) buffers.
    if (!mOfflineBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
        for (; it != mOfflineBuffers.end(); it++) {
            stream = (*it).stream;
            if (NULL != stream) {
                rc = stream->unmapBuf((*it).type,
                        (*it).index,
                        -1);
                if (NO_ERROR != rc) {
                    ALOGE("%s: Error during offline buffer unmap %d",
                            __func__, rc);
                }
                CDBG("%s: Unmapped buffer with index %d", __func__, (*it).index);
            }
            // In single mode, erase only the first entry and stop; erasing
            // inside the loop is safe because we break immediately after.
            if (!all) {
                mOfflineBuffers.erase(it);
                break;
            }
        }
        if (all) {
            mOfflineBuffers.clear();
        }
    }

    // Offline metadata buffers — same pattern as above.
    if (!mOfflineMetaBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
        for (; it != mOfflineMetaBuffers.end(); it++) {
            stream = (*it).stream;
            if (NULL != stream) {
                rc = stream->unmapBuf((*it).type,
                        (*it).index,
                        -1);
                if (NO_ERROR != rc) {
                    ALOGE("%s: Error during offline buffer unmap %d",
                            __func__, rc);
                }
                CDBG("%s: Unmapped meta buffer with index %d", __func__, (*it).index);
            }
            if (!all) {
                mOfflineMetaBuffers.erase(it);
                break;
            }
        }
        if (all) {
            mOfflineMetaBuffers.clear();
        }
    }
    return rc;
}
2958
2959
2960 /*===========================================================================
2961 * FUNCTION : extractFrameAndRotation
2962 *
2963 * DESCRIPTION: Extract output rotation and frame data if present
2964 *
2965 * PARAMETERS :
2966 * @frame : input frame from source stream
2967 * meta_buffer: metadata buffer
2968 * @metadata : corresponding metadata
2969 * @fwk_frame :
2970 *
2971 * RETURN : int32_t type of status
2972 * NO_ERROR -- success
2973 * none-zero failure code
2974 *==========================================================================*/
extractFrameCropAndRotation(mm_camera_super_buf_t * frame,mm_camera_buf_def_t * meta_buffer,jpeg_settings_t * jpeg_settings,qcamera_fwk_input_pp_data_t & fwk_frame)2975 int32_t QCamera3ReprocessChannel::extractFrameCropAndRotation(mm_camera_super_buf_t *frame,
2976 mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
2977 qcamera_fwk_input_pp_data_t &fwk_frame)
2978 {
2979 if ((NULL == meta_buffer) || (NULL == frame) || (NULL == jpeg_settings)) {
2980 return BAD_VALUE;
2981 }
2982
2983 metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
2984 if (NULL == meta) {
2985 return BAD_VALUE;
2986 }
2987
2988 for (int i = 0; i < frame->num_bufs; i++) {
2989 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
2990 QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
2991
2992 if (pStream != NULL && pSrcStream != NULL) {
2993 // Find rotation info for reprocess stream
2994 if (jpeg_settings->jpeg_orientation == 0) {
2995 fwk_frame.reproc_config.rotation = ROTATE_0;
2996 } else if (jpeg_settings->jpeg_orientation == 90) {
2997 fwk_frame.reproc_config.rotation = ROTATE_90;
2998 } else if (jpeg_settings->jpeg_orientation == 180) {
2999 fwk_frame.reproc_config.rotation = ROTATE_180;
3000 } else if (jpeg_settings->jpeg_orientation == 270) {
3001 fwk_frame.reproc_config.rotation = ROTATE_270;
3002 }
3003
3004 // Find crop info for reprocess stream
3005 cam_crop_data_t *crop_data = (cam_crop_data_t *)
3006 POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, meta);
3007 if (NULL != crop_data) {
3008 for (int j = 0; j < crop_data->num_of_streams; j++) {
3009 if (crop_data->crop_info[j].stream_id ==
3010 pSrcStream->getMyServerID()) {
3011 fwk_frame.reproc_config.output_crop =
3012 crop_data->crop_info[0].crop;
3013 CDBG("%s: Found offline reprocess crop %dx%d %dx%d",
3014 __func__,
3015 crop_data->crop_info[0].crop.left,
3016 crop_data->crop_info[0].crop.top,
3017 crop_data->crop_info[0].crop.width,
3018 crop_data->crop_info[0].crop.height);
3019 }
3020 }
3021 }
3022 fwk_frame.input_buffer = *frame->bufs[i];
3023 fwk_frame.metadata_buffer = *meta_buffer;
3024 break;
3025 } else {
3026 ALOGE("%s: Source/Re-process streams are invalid", __func__);
3027 return BAD_VALUE;
3028 }
3029 }
3030
3031 return NO_ERROR;
3032 }
3033
3034 /*===========================================================================
3035 * FUNCTION : extractCrop
3036 *
3037 * DESCRIPTION: Extract framework output crop if present
3038 *
3039 * PARAMETERS :
3040 * @frame : input frame for reprocessing
3041 *
3042 * RETURN : int32_t type of status
3043 * NO_ERROR -- success
3044 * none-zero failure code
3045 *==========================================================================*/
extractCrop(qcamera_fwk_input_pp_data_t * frame)3046 int32_t QCamera3ReprocessChannel::extractCrop(qcamera_fwk_input_pp_data_t *frame)
3047 {
3048 if (NULL == frame) {
3049 ALOGE("%s: Incorrect input frame", __func__);
3050 return BAD_VALUE;
3051 }
3052
3053 if (NULL == frame->metadata_buffer.buffer) {
3054 ALOGE("%s: No metadata available", __func__);
3055 return BAD_VALUE;
3056 }
3057
3058 // Find crop info for reprocess stream
3059 metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
3060 if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, meta)) {
3061 cam_crop_data_t *crop_data = (cam_crop_data_t *)
3062 POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, meta);
3063 if (1 == crop_data->num_of_streams) {
3064 frame->reproc_config.output_crop = crop_data->crop_info[0].crop;
3065 CDBG("%s: Found offline reprocess crop %dx%d %dx%d", __func__,
3066 crop_data->crop_info[0].crop.left,
3067 crop_data->crop_info[0].crop.top,
3068 crop_data->crop_info[0].crop.width,
3069 crop_data->crop_info[0].crop.height);
3070 } else {
3071 ALOGE("%s: Incorrect number of offline crop data entries %d",
3072 __func__,
3073 crop_data->num_of_streams);
3074 return BAD_VALUE;
3075 }
3076 } else {
3077 CDBG_HIGH("%s: Crop data not present", __func__);
3078 }
3079
3080 return NO_ERROR;
3081 }
3082
3083 /*===========================================================================
3084 * FUNCTION : doReprocessOffline
3085 *
3086 * DESCRIPTION: request to do a reprocess on the frame
3087 *
3088 * PARAMETERS :
3089 * @frame : input frame for reprocessing
3090 *
3091 * RETURN : int32_t type of status
3092 * NO_ERROR -- success
3093 * none-zero failure code
3094 *==========================================================================*/
doReprocessOffline(qcamera_fwk_input_pp_data_t * frame)3095 int32_t QCamera3ReprocessChannel::doReprocessOffline(qcamera_fwk_input_pp_data_t *frame)
3096 {
3097 int32_t rc = 0;
3098 OfflineBuffer mappedBuffer;
3099
3100 if (m_numStreams < 1) {
3101 ALOGE("%s: No reprocess stream is created", __func__);
3102 return -1;
3103 }
3104
3105 if (NULL == frame) {
3106 ALOGE("%s: Incorrect input frame", __func__);
3107 return BAD_VALUE;
3108 }
3109
3110 if (NULL == frame->metadata_buffer.buffer) {
3111 ALOGE("%s: No metadata available", __func__);
3112 return BAD_VALUE;
3113 }
3114
3115 if (NULL == frame->input_buffer.buffer) {
3116 ALOGE("%s: No input buffer available", __func__);
3117 return BAD_VALUE;
3118 }
3119
3120 if ((0 == m_numStreams) || (NULL == mStreams[0])) {
3121 ALOGE("%s: Reprocess stream not initialized!", __func__);
3122 return NO_INIT;
3123 }
3124
3125 QCamera3Stream *pStream = mStreams[0];
3126 int32_t max_idx = MAX_INFLIGHT_REQUESTS-1;
3127 //loop back the indices if max burst count reached
3128 if (mOfflineBuffersIndex == max_idx) {
3129 mOfflineBuffersIndex = -1;
3130 }
3131 uint32_t buf_idx = mOfflineBuffersIndex + 1;
3132
3133 rc = pStream->mapBuf(
3134 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
3135 buf_idx, -1,
3136 frame->input_buffer.fd, frame->input_buffer.frame_len);
3137 if (NO_ERROR == rc) {
3138 mappedBuffer.index = buf_idx;
3139 mappedBuffer.stream = pStream;
3140 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
3141 mOfflineBuffers.push_back(mappedBuffer);
3142 mOfflineBuffersIndex = buf_idx;
3143 CDBG("%s: Mapped buffer with index %d", __func__, mOfflineBuffersIndex);
3144 }
3145
3146 max_idx = MAX_INFLIGHT_REQUESTS*2 - 1;
3147 //loop back the indices if max burst count reached
3148 if (mOfflineMetaIndex == max_idx) {
3149 mOfflineMetaIndex = MAX_INFLIGHT_REQUESTS-1;
3150 }
3151 uint32_t meta_buf_idx = mOfflineMetaIndex + 1;
3152
3153 rc |= pStream->mapBuf(
3154 CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
3155 meta_buf_idx, -1,
3156 frame->metadata_buffer.fd, frame->metadata_buffer.frame_len);
3157 if (NO_ERROR == rc) {
3158 mappedBuffer.index = meta_buf_idx;
3159 mappedBuffer.stream = pStream;
3160 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
3161 mOfflineMetaBuffers.push_back(mappedBuffer);
3162 mOfflineMetaIndex = meta_buf_idx;
3163 CDBG("%s: Mapped meta buffer with index %d", __func__, mOfflineMetaIndex);
3164 }
3165
3166 if (rc == NO_ERROR) {
3167 cam_stream_parm_buffer_t param;
3168 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t));
3169 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
3170 param.reprocess.buf_index = buf_idx;
3171 param.reprocess.frame_idx = frame->input_buffer.frame_idx;
3172 param.reprocess.meta_present = 1;
3173 param.reprocess.meta_buf_index = meta_buf_idx;
3174 param.reprocess.frame_pp_config.rotation = frame->reproc_config.rotation;
3175 param.reprocess.frame_pp_config.crop.input_crop = frame->reproc_config.output_crop;
3176 param.reprocess.frame_pp_config.crop.crop_enabled = 1;
3177 rc = pStream->setParameter(param);
3178 if (rc != NO_ERROR) {
3179 ALOGE("%s: stream setParameter for reprocess failed", __func__);
3180 }
3181 } else {
3182 ALOGE("%s: Input buffer memory map failed: %d", __func__, rc);
3183 }
3184
3185 return rc;
3186 }
3187
3188 /*===========================================================================
3189 * FUNCTION : doReprocess
3190 *
3191 * DESCRIPTION: request to do a reprocess on the frame
3192 *
3193 * PARAMETERS :
3194 * @buf_fd : fd to the input buffer that needs reprocess
 *   @buf_length : length of the input buffer
3196 * @ret_val : result of reprocess.
3197 * Example: Could be faceID in case of register face image.
3198 *
3199 * RETURN : int32_t type of status
3200 * NO_ERROR -- success
3201 * none-zero failure code
3202 *==========================================================================*/
doReprocess(int buf_fd,uint32_t buf_length,int32_t & ret_val,mm_camera_super_buf_t * meta_frame)3203 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
3204 uint32_t buf_length,
3205 int32_t &ret_val,
3206 mm_camera_super_buf_t *meta_frame)
3207 {
3208 int32_t rc = 0;
3209 if (m_numStreams < 1) {
3210 ALOGE("%s: No reprocess stream is created", __func__);
3211 return -1;
3212 }
3213 if (meta_frame == NULL) {
3214 ALOGE("%s: Did not get corresponding metadata in time", __func__);
3215 return -1;
3216 }
3217
3218 uint32_t buf_idx = 0;
3219 for (int i = 0; i < m_numStreams; i++) {
3220 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
3221 buf_idx, -1,
3222 buf_fd, buf_length);
3223
3224 if (rc == NO_ERROR) {
3225 cam_stream_parm_buffer_t param;
3226 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t));
3227 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
3228 param.reprocess.buf_index = buf_idx;
3229 param.reprocess.meta_present = 1;
3230 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
3231 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
3232 rc = mStreams[i]->setParameter(param);
3233 if (rc == NO_ERROR) {
3234 ret_val = param.reprocess.ret_val;
3235 }
3236 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
3237 buf_idx, -1);
3238 }
3239 }
3240 return rc;
3241 }
3242
/*===========================================================================
 * FUNCTION   : addReprocStreamsFromSource
 *
 * DESCRIPTION: add reprocess streams from input source channel
 *
 * PARAMETERS :
 *   @pp_config    : pp feature configuration
 *   @src_config   : source reprocess configuration
 *   @is_type      : image stabilization type for the reprocess stream
 *   @pMetaChannel : ptr to metadata channel to get corresp. metadata
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
        const reprocess_config_t &src_config , cam_is_type_t is_type,
        QCamera3Channel *pMetaChannel)
{
    int32_t rc = 0;
    cam_stream_reproc_config_t reprocess_config;
    cam_stream_type_t streamType;

    /* There can be MAX_INFLIGHT_REQUESTS number of requests that could get queued up.
     * Hence allocating same number of reprocess channel's output buffers */
    int num_buffers = MAX_INFLIGHT_REQUESTS;
    cam_dimension_t streamDim = src_config.output_stream_dim;

    if (NULL != src_config.src_channel) {
        QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
        if (pSrcStream == NULL) {
            ALOGE("%s: source channel doesn't have a stream", __func__);
            return BAD_VALUE;
        }
        // Remember the source stream handle so incoming frames can later be
        // matched back to this reprocess stream (see getStreamBySrcHandle).
        mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
    }

    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;

    // Describe the offline input: format, dimensions and plane layout are
    // all taken from the source stream configuration.
    reprocess_config.offline.input_fmt = src_config.stream_format;
    reprocess_config.offline.input_dim = src_config.input_stream_dim;
    reprocess_config.offline.input_buf_planes.plane_info =
            src_config.input_stream_plane_info.plane_info;
    reprocess_config.offline.num_of_bufs = num_buffers;
    reprocess_config.offline.input_type = src_config.stream_type;

    reprocess_config.pp_feature_config = pp_config;
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
            m_handle,
            m_camOps,
            mPaddingInfo,
            (QCamera3Channel*)this);
    // NOTE(review): with exceptions enabled operator new never returns NULL;
    // this check is only meaningful under -fno-exceptions builds.
    if (pStream == NULL) {
        ALOGE("%s: No mem for Stream", __func__);
        return NO_MEMORY;
    }

    rc = pStream->init(streamType, src_config.stream_format,
            streamDim, &reprocess_config,
            num_buffers,
            reprocess_config.pp_feature_config.feature_mask,
            is_type,
            QCamera3Channel::streamCbRoutine, this);

    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        ALOGE("%s: failed to create reprocess stream", __func__);
        delete pStream;
    }

    if (rc == NO_ERROR) {
        // Only record the source/meta channels once the stream is up.
        m_pSrcChannel = src_config.src_channel;
        m_pMetaChannel = pMetaChannel;
    }
    // NOTE(review): the super buffer is requested even when stream creation
    // failed above -- confirm this is intentional.
    if(m_camOps->request_super_buf(m_camHandle,m_handle,1,0) < 0) {
        ALOGE("%s: Request for super buffer failed",__func__);
    }
    return rc;
}
3325
// Fixed (hard-coded) VGA dimension used for the support channel's stream.
cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
3327
// Constructor: forwards to the base channel with no camera3_stream attached
// and defers heap-memory allocation until getStreamBufs() is called.
QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
                    mm_camera_ops_t *cam_ops,
                    cam_padding_info_t *paddingInfo,
                    uint32_t postprocess_mask,
                    void *userData) :
                        QCamera3Channel(cam_handle, cam_ops,
                                NULL, paddingInfo, postprocess_mask, userData),
                        mMemory(NULL)
{
}
3338
~QCamera3SupportChannel()3339 QCamera3SupportChannel::~QCamera3SupportChannel()
3340 {
3341 if (m_bIsActive)
3342 stop();
3343
3344 if (mMemory) {
3345 mMemory->deallocate();
3346 delete mMemory;
3347 mMemory = NULL;
3348 }
3349 }
3350
initialize(cam_is_type_t isType,uint8_t)3351 int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType,
3352 uint8_t /*intent*/)
3353 {
3354 int32_t rc;
3355
3356 if (mMemory || m_numStreams > 0) {
3357 ALOGE("%s: metadata channel already initialized", __func__);
3358 return -EINVAL;
3359 }
3360
3361 rc = init(NULL, NULL);
3362 if (rc < 0) {
3363 ALOGE("%s: init failed", __func__);
3364 return rc;
3365 }
3366 mIsType = isType;
3367 // Hardcode to VGA size for now
3368 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
3369 CAM_FORMAT_YUV_420_NV21, kDim, MIN_STREAMING_BUFFER_NUM,
3370 mPostProcMask, mIsType);
3371 if (rc < 0) {
3372 ALOGE("%s: addStream failed", __func__);
3373 }
3374 return rc;
3375 }
3376
// Framework buffer requests are a no-op for the support channel; its frames
// are produced and recycled internally (see streamCbRoutine).
int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
                                                uint32_t /*frameNumber*/)
{
    return NO_ERROR;
}
3382
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream *)3383 void QCamera3SupportChannel::streamCbRoutine(
3384 mm_camera_super_buf_t *super_frame,
3385 QCamera3Stream * /*stream*/)
3386 {
3387 if (super_frame == NULL || super_frame->num_bufs != 1) {
3388 ALOGE("%s: super_frame is not valid", __func__);
3389 return;
3390 }
3391 bufDone(super_frame);
3392 free(super_frame);
3393 }
3394
getStreamBufs(uint32_t len)3395 QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
3396 {
3397 int rc;
3398
3399 mMemory = new QCamera3HeapMemory();
3400 if (!mMemory) {
3401 ALOGE("%s: unable to create heap memory", __func__);
3402 return NULL;
3403 }
3404 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
3405 if (rc < 0) {
3406 ALOGE("%s: unable to allocate heap memory", __func__);
3407 delete mMemory;
3408 mMemory = NULL;
3409 return NULL;
3410 }
3411 return mMemory;
3412 }
3413
putStreamBufs()3414 void QCamera3SupportChannel::putStreamBufs()
3415 {
3416 mMemory->deallocate();
3417 delete mMemory;
3418 mMemory = NULL;
3419 }
3420
3421 }; // namespace qcamera
3422