1 /* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera3Channel"
31
32 #include <stdlib.h>
33 #include <cstdlib>
34 #include <cutils/properties.h>
35 #include <stdio.h>
36 #include <string.h>
37 #include <hardware/camera3.h>
38 #include <math.h>
39 #include <system/camera_metadata.h>
40 #include <gralloc_priv.h>
41 #include <utils/Log.h>
42 #include <utils/Errors.h>
43 #include <cutils/properties.h>
44 #include "QCamera3Channel.h"
45
46 using namespace android;
47
48 #define MIN_STREAMING_BUFFER_NUM 7
49
50 namespace qcamera {
51 static const char ExifAsciiPrefix[] =
52 { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0"
53 static const char ExifUndefinedPrefix[] =
54 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0"
55
56 #define GPS_PROCESSING_METHOD_SIZE 101
57 #define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix))
58 #define FOCAL_LENGTH_DECIMAL_PRECISION 100
59
60 /*===========================================================================
61 * FUNCTION : QCamera3Channel
62 *
63 * DESCRIPTION: constrcutor of QCamera3Channel
64 *
65 * PARAMETERS :
66 * @cam_handle : camera handle
67 * @cam_ops : ptr to camera ops table
68 *
69 * RETURN : none
70 *==========================================================================*/
QCamera3Channel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData)71 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
72 mm_camera_ops_t *cam_ops,
73 channel_cb_routine cb_routine,
74 cam_padding_info_t *paddingInfo,
75 void *userData)
76 {
77 m_camHandle = cam_handle;
78 m_camOps = cam_ops;
79 m_bIsActive = false;
80
81 m_handle = 0;
82 m_numStreams = 0;
83 memset(mStreams, 0, sizeof(mStreams));
84 mUserData = userData;
85
86 mStreamInfoBuf = NULL;
87 mChannelCB = cb_routine;
88 mPaddingInfo = paddingInfo;
89 }
90
91 /*===========================================================================
92 * FUNCTION : QCamera3Channel
93 *
94 * DESCRIPTION: default constrcutor of QCamera3Channel
95 *
96 * PARAMETERS : none
97 *
98 * RETURN : none
99 *==========================================================================*/
QCamera3Channel()100 QCamera3Channel::QCamera3Channel()
101 {
102 m_camHandle = 0;
103 m_camOps = NULL;
104 m_bIsActive = false;
105
106 m_handle = 0;
107 m_numStreams = 0;
108 memset(mStreams, 0, sizeof(mStreams));
109 mUserData = NULL;
110
111 mStreamInfoBuf = NULL;
112 mChannelCB = NULL;
113 mPaddingInfo = NULL;
114 }
115
116 /*===========================================================================
117 * FUNCTION : ~QCamera3Channel
118 *
119 * DESCRIPTION: destructor of QCamera3Channel
120 *
121 * PARAMETERS : none
122 *
123 * RETURN : none
124 *==========================================================================*/
~QCamera3Channel()125 QCamera3Channel::~QCamera3Channel()
126 {
127 if (m_bIsActive)
128 stop();
129
130 for (int i = 0; i < m_numStreams; i++) {
131 if (mStreams[i] != NULL) {
132 delete mStreams[i];
133 mStreams[i] = 0;
134 }
135 }
136 if (m_handle) {
137 m_camOps->delete_channel(m_camHandle, m_handle);
138 ALOGE("%s: deleting channel %d", __func__, m_handle);
139 m_handle = 0;
140 }
141 m_numStreams = 0;
142 }
143
144 /*===========================================================================
145 * FUNCTION : init
146 *
147 * DESCRIPTION: initialization of channel
148 *
149 * PARAMETERS :
150 * @attr : channel bundle attribute setting
151 * @dataCB : data notify callback
152 * @userData: user data ptr
153 *
154 * RETURN : int32_t type of status
155 * NO_ERROR -- success
156 * none-zero failure code
157 *==========================================================================*/
init(mm_camera_channel_attr_t * attr,mm_camera_buf_notify_t dataCB)158 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
159 mm_camera_buf_notify_t dataCB)
160 {
161 m_handle = m_camOps->add_channel(m_camHandle,
162 attr,
163 dataCB,
164 this);
165 if (m_handle == 0) {
166 ALOGE("%s: Add channel failed", __func__);
167 return UNKNOWN_ERROR;
168 }
169 return NO_ERROR;
170 }
171
172 /*===========================================================================
173 * FUNCTION : addStream
174 *
175 * DESCRIPTION: add a stream into channel
176 *
177 * PARAMETERS :
178 * @allocator : stream related buffer allocator
179 * @streamInfoBuf : ptr to buf that constains stream info
180 * @minStreamBufNum: number of stream buffers needed
181 * @paddingInfo : padding information
182 * @stream_cb : stream data notify callback
183 * @userdata : user data ptr
184 *
185 * RETURN : int32_t type of status
186 * NO_ERROR -- success
187 * none-zero failure code
188 *==========================================================================*/
addStream(cam_stream_type_t streamType,cam_format_t streamFormat,cam_dimension_t streamDim,uint8_t minStreamBufNum)189 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
190 cam_format_t streamFormat,
191 cam_dimension_t streamDim,
192 uint8_t minStreamBufNum)
193 {
194 int32_t rc = NO_ERROR;
195
196 if (m_numStreams >= 1) {
197 ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
198 return BAD_VALUE;
199 }
200
201 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
202 ALOGE("%s: stream number (%d) exceeds max limit (%d)",
203 __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
204 return BAD_VALUE;
205 }
206 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
207 m_handle,
208 m_camOps,
209 mPaddingInfo,
210 this);
211 if (pStream == NULL) {
212 ALOGE("%s: No mem for Stream", __func__);
213 return NO_MEMORY;
214 }
215
216 rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
217 streamCbRoutine, this);
218 if (rc == 0) {
219 mStreams[m_numStreams] = pStream;
220 m_numStreams++;
221 } else {
222 delete pStream;
223 }
224 return rc;
225 }
226
227 /*===========================================================================
228 * FUNCTION : start
229 *
230 * DESCRIPTION: start channel, which will start all streams belong to this channel
231 *
232 * PARAMETERS :
233 *
234 * RETURN : int32_t type of status
235 * NO_ERROR -- success
236 * none-zero failure code
237 *==========================================================================*/
start()238 int32_t QCamera3Channel::start()
239 {
240 int32_t rc = NO_ERROR;
241
242 if (m_numStreams > 1) {
243 ALOGE("%s: bundle not supported", __func__);
244 }
245
246 for (int i = 0; i < m_numStreams; i++) {
247 if (mStreams[i] != NULL) {
248 mStreams[i]->start();
249 }
250 }
251 rc = m_camOps->start_channel(m_camHandle, m_handle);
252
253 if (rc != NO_ERROR) {
254 for (int i = 0; i < m_numStreams; i++) {
255 if (mStreams[i] != NULL) {
256 mStreams[i]->stop();
257 }
258 }
259 } else {
260 m_bIsActive = true;
261 }
262
263 return rc;
264 }
265
266 /*===========================================================================
267 * FUNCTION : stop
268 *
269 * DESCRIPTION: stop a channel, which will stop all streams belong to this channel
270 *
271 * PARAMETERS : none
272 *
273 * RETURN : int32_t type of status
274 * NO_ERROR -- success
275 * none-zero failure code
276 *==========================================================================*/
stop()277 int32_t QCamera3Channel::stop()
278 {
279 int32_t rc = NO_ERROR;
280 if(!m_bIsActive) {
281 ALOGE("%s: Attempt to stop inactive channel",__func__);
282 return rc;
283 }
284
285 rc = m_camOps->stop_channel(m_camHandle, m_handle);
286
287 for (int i = 0; i < m_numStreams; i++) {
288 if (mStreams[i] != NULL) {
289 mStreams[i]->stop();
290 }
291 }
292
293 m_bIsActive = false;
294 return rc;
295 }
296
297 /*===========================================================================
298 * FUNCTION : bufDone
299 *
300 * DESCRIPTION: return a stream buf back to kernel
301 *
302 * PARAMETERS :
303 * @recvd_frame : stream buf frame to be returned
304 *
305 * RETURN : int32_t type of status
306 * NO_ERROR -- success
307 * none-zero failure code
308 *==========================================================================*/
bufDone(mm_camera_super_buf_t * recvd_frame)309 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
310 {
311 int32_t rc = NO_ERROR;
312 for (int i = 0; i < recvd_frame->num_bufs; i++) {
313 if (recvd_frame->bufs[i] != NULL) {
314 for (int j = 0; j < m_numStreams; j++) {
315 if (mStreams[j] != NULL &&
316 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
317 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
318 break; // break loop j
319 }
320 }
321 }
322 }
323
324 return rc;
325 }
326
327 /*===========================================================================
328 * FUNCTION : getStreamTypeMask
329 *
330 * DESCRIPTION: Get bit mask of all stream types in this channel
331 *
332 * PARAMETERS : None
333 *
334 * RETURN : Bit mask of all stream types in this channel
335 *==========================================================================*/
getStreamTypeMask()336 uint32_t QCamera3Channel::getStreamTypeMask()
337 {
338 uint32_t mask = 0;
339 for (int i = 0; i < m_numStreams; i++) {
340 mask |= (0x1 << mStreams[i]->getMyType());
341 }
342 return mask;
343 }
344
345 /*===========================================================================
346 * FUNCTION : getInternalFormatBuffer
347 *
348 * DESCRIPTION: return buffer in the internal format structure
349 *
350 * PARAMETERS :
351 * @streamHandle : buffer handle
352 *
353 * RETURN : stream object. NULL if not found
354 *==========================================================================*/
getInternalFormatBuffer(buffer_handle_t * buffer)355 mm_camera_buf_def_t* QCamera3RegularChannel::getInternalFormatBuffer(
356 buffer_handle_t * buffer)
357 {
358 int32_t index;
359 if(buffer == NULL)
360 return NULL;
361 index = mMemory->getMatchBufIndex((void*)buffer);
362 if(index < 0) {
363 ALOGE("%s: Could not find object among registered buffers",__func__);
364 return NULL;
365 }
366 return mStreams[0]->getInternalFormatBuffer(index);
367 }
368
369 /*===========================================================================
370 * FUNCTION : getStreamByHandle
371 *
372 * DESCRIPTION: return stream object by stream handle
373 *
374 * PARAMETERS :
375 * @streamHandle : stream handle
376 *
377 * RETURN : stream object. NULL if not found
378 *==========================================================================*/
getStreamByHandle(uint32_t streamHandle)379 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
380 {
381 for (int i = 0; i < m_numStreams; i++) {
382 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
383 return mStreams[i];
384 }
385 }
386 return NULL;
387 }
388
389 /*===========================================================================
390 * FUNCTION : getStreamByIndex
391 *
392 * DESCRIPTION: return stream object by index
393 *
394 * PARAMETERS :
395 * @streamHandle : stream handle
396 *
397 * RETURN : stream object. NULL if not found
398 *==========================================================================*/
getStreamByIndex(uint8_t index)399 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
400 {
401 if (index < m_numStreams) {
402 return mStreams[index];
403 }
404 return NULL;
405 }
406
407 /*===========================================================================
408 * FUNCTION : streamCbRoutine
409 *
410 * DESCRIPTION: callback routine for stream
411 *
412 * PARAMETERS :
413 * @streamHandle : stream handle
414 *
415 * RETURN : stream object. NULL if not found
416 *==========================================================================*/
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream,void * userdata)417 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
418 QCamera3Stream *stream, void *userdata)
419 {
420 QCamera3Channel *channel = (QCamera3Channel *)userdata;
421 if (channel == NULL) {
422 ALOGE("%s: invalid channel pointer", __func__);
423 return;
424 }
425 channel->streamCbRoutine(super_frame, stream);
426 }
427
428 /*===========================================================================
429 * FUNCTION : QCamera3RegularChannel
430 *
431 * DESCRIPTION: constrcutor of QCamera3RegularChannel
432 *
433 * PARAMETERS :
434 * @cam_handle : camera handle
435 * @cam_ops : ptr to camera ops table
436 * @cb_routine : callback routine to frame aggregator
437 * @stream : camera3_stream_t structure
438 *
439 * RETURN : none
440 *==========================================================================*/
QCamera3RegularChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream)441 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
442 mm_camera_ops_t *cam_ops,
443 channel_cb_routine cb_routine,
444 cam_padding_info_t *paddingInfo,
445 void *userData,
446 camera3_stream_t *stream) :
447 QCamera3Channel(cam_handle, cam_ops, cb_routine,
448 paddingInfo, userData),
449 mCamera3Stream(stream),
450 mNumBufs(0),
451 mCamera3Buffers(NULL),
452 mMemory(NULL),
453 mWidth(stream->width),
454 mHeight(stream->height)
455 {
456 }
457
458 /*===========================================================================
459 * FUNCTION : QCamera3RegularChannel
460 *
461 * DESCRIPTION: constrcutor of QCamera3RegularChannel
462 *
463 * PARAMETERS :
464 * @cam_handle : camera handle
465 * @cam_ops : ptr to camera ops table
466 * @cb_routine : callback routine to frame aggregator
467 * @stream : camera3_stream_t structure
468 *
469 * RETURN : none
470 *==========================================================================*/
QCamera3RegularChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,uint32_t width,uint32_t height)471 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
472 mm_camera_ops_t *cam_ops,
473 channel_cb_routine cb_routine,
474 cam_padding_info_t *paddingInfo,
475 void *userData,
476 camera3_stream_t *stream,
477 uint32_t width, uint32_t height) :
478 QCamera3Channel(cam_handle, cam_ops, cb_routine,
479 paddingInfo, userData),
480 mCamera3Stream(stream),
481 mNumBufs(0),
482 mCamera3Buffers(NULL),
483 mMemory(NULL),
484 mWidth(width),
485 mHeight(height)
486 {
487 }
488
489 /*===========================================================================
490 * FUNCTION : ~QCamera3RegularChannel
491 *
492 * DESCRIPTION: destructor of QCamera3RegularChannel
493 *
494 * PARAMETERS : none
495 *
496 * RETURN : none
497 *==========================================================================*/
~QCamera3RegularChannel()498 QCamera3RegularChannel::~QCamera3RegularChannel()
499 {
500 if (mCamera3Buffers) {
501 delete[] mCamera3Buffers;
502 }
503 }
504
initialize()505 int32_t QCamera3RegularChannel::initialize()
506 {
507 //TO DO
508 return 0;
509 }
510
511 /*===========================================================================
512 * FUNCTION : request
513 *
514 * DESCRIPTION: process a request from camera service. Stream on if ncessary.
515 *
516 * PARAMETERS :
517 * @buffer : buffer to be filled for this request
518 *
519 * RETURN : 0 on a success start of capture
520 * -EINVAL on invalid input
521 * -ENODEV on serious error
522 *==========================================================================*/
request(buffer_handle_t * buffer,uint32_t frameNumber)523 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
524 {
525 //FIX ME: Return buffer back in case of failures below.
526
527 int32_t rc = NO_ERROR;
528 int index;
529 if(!m_bIsActive) {
530 ALOGD("%s: First request on this channel starting stream",__func__);
531 start();
532 if(rc != NO_ERROR) {
533 ALOGE("%s: Failed to start the stream on the request",__func__);
534 return rc;
535 }
536 } else {
537 ALOGV("%s: Request on an existing stream",__func__);
538 }
539
540 if(!mMemory) {
541 ALOGE("%s: error, Gralloc Memory object not yet created for this stream",__func__);
542 return NO_MEMORY;
543 }
544
545 index = mMemory->getMatchBufIndex((void*)buffer);
546 if(index < 0) {
547 ALOGE("%s: Could not find object among registered buffers",__func__);
548 return DEAD_OBJECT;
549 }
550
551 rc = mStreams[0]->bufDone(index);
552 if(rc != NO_ERROR) {
553 ALOGE("%s: Failed to Q new buffer to stream",__func__);
554 return rc;
555 }
556
557 rc = mMemory->markFrameNumber(index, frameNumber);
558 return rc;
559 }
560
561 /*===========================================================================
562 * FUNCTION : registerBuffers
563 *
564 * DESCRIPTION: register streaming buffers to the channel object
565 *
566 * PARAMETERS :
567 * @num_buffers : number of buffers to be registered
568 * @buffers : buffer to be registered
569 *
570 * RETURN : 0 on a success start of capture
571 * -EINVAL on invalid input
572 * -ENOMEM on failure to register the buffer
573 * -ENODEV on serious error
574 *==========================================================================*/
registerBuffers(uint32_t num_buffers,buffer_handle_t ** buffers)575 int32_t QCamera3RegularChannel::registerBuffers(uint32_t num_buffers, buffer_handle_t **buffers)
576 {
577 int rc = 0;
578 struct private_handle_t *priv_handle = (struct private_handle_t *)(*buffers[0]);
579 cam_stream_type_t streamType;
580 cam_format_t streamFormat;
581 cam_dimension_t streamDim;
582
583 rc = init(NULL, NULL);
584 if (rc < 0) {
585 ALOGE("%s: init failed", __func__);
586 return rc;
587 }
588
589 if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
590 if (priv_handle->flags & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
591 streamType = CAM_STREAM_TYPE_VIDEO;
592 streamFormat = CAM_FORMAT_YUV_420_NV12;
593 } else if (priv_handle->flags & private_handle_t::PRIV_FLAGS_HW_TEXTURE) {
594 streamType = CAM_STREAM_TYPE_PREVIEW;
595 streamFormat = CAM_FORMAT_YUV_420_NV21;
596 } else {
597 //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
598 // to be properly aligned and padded.
599 ALOGE("%s: priv_handle->flags 0x%x not supported",
600 __func__, priv_handle->flags);
601 streamType = CAM_STREAM_TYPE_SNAPSHOT;
602 streamFormat = CAM_FORMAT_YUV_420_NV21;
603 }
604 } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
605 streamType = CAM_STREAM_TYPE_CALLBACK;
606 streamFormat = CAM_FORMAT_YUV_420_NV21;
607 } else {
608 //TODO: Fail for other types of streams for now
609 ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
610 return -EINVAL;
611 }
612
613 /* Bookkeep buffer set because they go out of scope after register call */
614 mNumBufs = num_buffers;
615 mCamera3Buffers = new buffer_handle_t*[num_buffers];
616 if (mCamera3Buffers == NULL) {
617 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
618 return -ENOMEM;
619 }
620 for (size_t i = 0; i < num_buffers; i++)
621 mCamera3Buffers[i] = buffers[i];
622
623 streamDim.width = mWidth;
624 streamDim.height = mHeight;
625
626 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
627 num_buffers);
628 return rc;
629 }
630
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)631 void QCamera3RegularChannel::streamCbRoutine(
632 mm_camera_super_buf_t *super_frame,
633 QCamera3Stream *stream)
634 {
635 //FIXME Q Buf back in case of error?
636 uint8_t frameIndex;
637 buffer_handle_t *resultBuffer;
638 int32_t resultFrameNumber;
639 camera3_stream_buffer_t result;
640
641 if(!super_frame) {
642 ALOGE("%s: Invalid Super buffer",__func__);
643 return;
644 }
645
646 if(super_frame->num_bufs != 1) {
647 ALOGE("%s: Multiple streams are not supported",__func__);
648 return;
649 }
650 if(super_frame->bufs[0] == NULL ) {
651 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
652 __func__);
653 return;
654 }
655
656 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
657 if(frameIndex >= mNumBufs) {
658 ALOGE("%s: Error, Invalid index for buffer",__func__);
659 if(stream) {
660 stream->bufDone(frameIndex);
661 }
662 return;
663 }
664
665 ////Use below data to issue framework callback
666 resultBuffer = mCamera3Buffers[frameIndex];
667 resultFrameNumber = mMemory->getFrameNumber(frameIndex);
668
669 result.stream = mCamera3Stream;
670 result.buffer = resultBuffer;
671 result.status = CAMERA3_BUFFER_STATUS_OK;
672 result.acquire_fence = -1;
673 result.release_fence = -1;
674
675 mChannelCB(NULL, &result, resultFrameNumber, mUserData);
676 free(super_frame);
677 return;
678 }
679
getStreamBufs(uint32_t)680 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
681 {
682 if (mNumBufs == 0 || mCamera3Buffers == NULL) {
683 ALOGE("%s: buffers not registered yet", __func__);
684 return NULL;
685 }
686
687 mMemory = new QCamera3GrallocMemory();
688 if (mMemory == NULL) {
689 return NULL;
690 }
691
692 if (mMemory->registerBuffers(mNumBufs, mCamera3Buffers) < 0) {
693 delete mMemory;
694 mMemory = NULL;
695 return NULL;
696 }
697 return mMemory;
698 }
699
putStreamBufs()700 void QCamera3RegularChannel::putStreamBufs()
701 {
702 mMemory->unregisterBuffers();
703 delete mMemory;
704 mMemory = NULL;
705 }
706
707 int QCamera3RegularChannel::kMaxBuffers = 7;
708
QCamera3MetadataChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData)709 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
710 mm_camera_ops_t *cam_ops,
711 channel_cb_routine cb_routine,
712 cam_padding_info_t *paddingInfo,
713 void *userData) :
714 QCamera3Channel(cam_handle, cam_ops,
715 cb_routine, paddingInfo, userData),
716 mMemory(NULL)
717 {
718 }
719
~QCamera3MetadataChannel()720 QCamera3MetadataChannel::~QCamera3MetadataChannel()
721 {
722 if (m_bIsActive)
723 stop();
724
725 if (mMemory) {
726 mMemory->deallocate();
727 delete mMemory;
728 mMemory = NULL;
729 }
730 }
731
initialize()732 int32_t QCamera3MetadataChannel::initialize()
733 {
734 int32_t rc;
735 cam_dimension_t streamDim;
736
737 if (mMemory || m_numStreams > 0) {
738 ALOGE("%s: metadata channel already initialized", __func__);
739 return -EINVAL;
740 }
741
742 rc = init(NULL, NULL);
743 if (rc < 0) {
744 ALOGE("%s: init failed", __func__);
745 return rc;
746 }
747
748 streamDim.width = sizeof(metadata_buffer_t),
749 streamDim.height = 1;
750 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
751 streamDim, MIN_STREAMING_BUFFER_NUM);
752 if (rc < 0) {
753 ALOGE("%s: addStream failed", __func__);
754 }
755 return rc;
756 }
757
request(buffer_handle_t *,uint32_t)758 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
759 uint32_t /*frameNumber*/)
760 {
761 if (!m_bIsActive) {
762 return start();
763 }
764 else
765 return 0;
766 }
767
registerBuffers(uint32_t,buffer_handle_t **)768 int32_t QCamera3MetadataChannel::registerBuffers(uint32_t /*num_buffers*/,
769 buffer_handle_t ** /*buffers*/)
770 {
771 // no registerBuffers are supported for metadata channel
772 return -EINVAL;
773 }
774
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)775 void QCamera3MetadataChannel::streamCbRoutine(
776 mm_camera_super_buf_t *super_frame,
777 QCamera3Stream *stream)
778 {
779 uint32_t requestNumber = 0;
780 if (super_frame == NULL || super_frame->num_bufs != 1) {
781 ALOGE("%s: super_frame is not valid", __func__);
782 return;
783 }
784 mChannelCB(super_frame, NULL, requestNumber, mUserData);
785 }
786
getStreamBufs(uint32_t len)787 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
788 {
789 int rc;
790 if (len < sizeof(metadata_buffer_t)) {
791 ALOGE("%s: size doesn't match %d vs %d", __func__,
792 len, sizeof(metadata_buffer_t));
793 return NULL;
794 }
795 mMemory = new QCamera3HeapMemory();
796 if (!mMemory) {
797 ALOGE("%s: unable to create metadata memory", __func__);
798 return NULL;
799 }
800 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
801 if (rc < 0) {
802 ALOGE("%s: unable to allocate metadata memory", __func__);
803 delete mMemory;
804 mMemory = NULL;
805 return NULL;
806 }
807 memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
808 return mMemory;
809 }
810
putStreamBufs()811 void QCamera3MetadataChannel::putStreamBufs()
812 {
813 mMemory->deallocate();
814 delete mMemory;
815 mMemory = NULL;
816 }
817
818 /*===========================================================================
819 * FUNCTION : jpegEvtHandle
820 *
821 * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events.
822 Construct result payload and call mChannelCb to deliver buffer
823 to framework.
824 *
825 * PARAMETERS :
826 * @status : status of jpeg job
827 * @client_hdl: jpeg client handle
828 * @jobId : jpeg job Id
829 * @p_ouput : ptr to jpeg output result struct
830 * @userdata : user data ptr
831 *
832 * RETURN : none
833 *==========================================================================*/
jpegEvtHandle(jpeg_job_status_t status,uint32_t,uint32_t jobId,mm_jpeg_output_t * p_output,void * userdata)834 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
835 uint32_t /*client_hdl*/,
836 uint32_t jobId,
837 mm_jpeg_output_t *p_output,
838 void *userdata)
839 {
840 buffer_handle_t *resultBuffer;
841 int32_t resultFrameNumber;
842 int resultStatus = CAMERA3_BUFFER_STATUS_OK;
843 camera3_stream_buffer_t result;
844 camera3_jpeg_blob_t jpegHeader;
845 char* jpeg_eof = 0;
846 int maxJpegSize;
847 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
848 if (obj) {
849 //Construct payload for process_capture_result. Call mChannelCb
850
851 qcamera_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
852
853 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
854 ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
855 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
856 }
857
858 //Construct jpeg transient header of type camera3_jpeg_blob_t
859 //Append at the end of jpeg image of buf_filled_len size
860
861 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
862 jpegHeader.jpeg_size = p_output->buf_filled_len;
863
864
865 char* jpeg_buf = (char *)p_output->buf_vaddr;
866
867 if(obj->mJpegSettings->max_jpeg_size <= 0 ||
868 obj->mJpegSettings->max_jpeg_size > obj->mMemory->getSize(obj->mCurrentBufIndex)){
869 ALOGE("%s:Max Jpeg size :%d is out of valid range setting to size of buffer",
870 __func__, obj->mJpegSettings->max_jpeg_size);
871 maxJpegSize = obj->mMemory->getSize(obj->mCurrentBufIndex);
872 } else {
873 maxJpegSize = obj->mJpegSettings->max_jpeg_size;
874 ALOGE("%s: Setting max jpeg size to %d",__func__, maxJpegSize);
875 }
876 jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
877 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
878 obj->mMemory->cleanInvalidateCache(obj->mCurrentBufIndex);
879
880 ////Use below data to issue framework callback
881 resultBuffer = obj->mCamera3Buffers[obj->mCurrentBufIndex];
882 resultFrameNumber = obj->mMemory->getFrameNumber(obj->mCurrentBufIndex);
883
884 result.stream = obj->mCamera3Stream;
885 result.buffer = resultBuffer;
886 result.status = resultStatus;
887 result.acquire_fence = -1;
888 result.release_fence = -1;
889
890 ALOGV("%s: Issue Callback", __func__);
891 obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
892
893 // release internal data for jpeg job
894 if (job != NULL) {
895 obj->m_postprocessor.releaseJpegJobData(job);
896 free(job);
897 }
898 return;
899 // }
900 } else {
901 ALOGE("%s: Null userdata in jpeg callback", __func__);
902 }
903 }
904
QCamera3PicChannel(uint32_t cam_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream)905 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
906 mm_camera_ops_t *cam_ops,
907 channel_cb_routine cb_routine,
908 cam_padding_info_t *paddingInfo,
909 void *userData,
910 camera3_stream_t *stream) :
911 QCamera3Channel(cam_handle, cam_ops, cb_routine,
912 paddingInfo, userData),
913 m_postprocessor(this),
914 mCamera3Stream(stream),
915 mNumBufs(0),
916 mCamera3Buffers(NULL),
917 mJpegSettings(NULL),
918 mCurrentBufIndex(-1),
919 mMemory(NULL),
920 mYuvMemory(NULL)
921 {
922 int32_t rc = m_postprocessor.init(jpegEvtHandle, this);
923 if (rc != 0) {
924 ALOGE("Init Postprocessor failed");
925 }
926 }
927
~QCamera3PicChannel()928 QCamera3PicChannel::~QCamera3PicChannel()
929 {
930 int32_t rc = m_postprocessor.stop();
931 if (rc != NO_ERROR) {
932 ALOGE("%s: Postprocessor stop failed", __func__);
933 }
934 rc = m_postprocessor.deinit();
935 if (rc != 0) {
936 ALOGE("De-init Postprocessor failed");
937 }
938 if (mCamera3Buffers) {
939 delete[] mCamera3Buffers;
940 }
941 }
942
initialize()943 int32_t QCamera3PicChannel::initialize()
944 {
945 int32_t rc = NO_ERROR;
946 cam_dimension_t streamDim;
947 cam_stream_type_t streamType;
948 cam_format_t streamFormat;
949 mm_camera_channel_attr_t attr;
950
951 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
952 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
953 attr.look_back = 1;
954 attr.post_frame_skip = 1;
955 attr.water_mark = 1;
956 attr.max_unmatched_frames = 1;
957
958 rc = init(&attr, NULL);
959 if (rc < 0) {
960 ALOGE("%s: init failed", __func__);
961 return rc;
962 }
963
964 streamType = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
965 streamFormat = CAM_FORMAT_YUV_420_NV21;
966 streamDim.width = mCamera3Stream->width;
967 streamDim.height = mCamera3Stream->height;
968
969 int num_buffers = 1;
970
971 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
972 num_buffers);
973
974 return rc;
975 }
976
request(buffer_handle_t * buffer,uint32_t frameNumber,jpeg_settings_t * jpegSettings,mm_camera_buf_def_t * pInputBuffer,QCamera3Channel * pInputChannel)977 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
978 uint32_t frameNumber, jpeg_settings_t* jpegSettings,
979 mm_camera_buf_def_t *pInputBuffer,QCamera3Channel* pInputChannel)
980 {
981 //FIX ME: Return buffer back in case of failures below.
982
983 int32_t rc = NO_ERROR;
984 int index;
985 mJpegSettings = jpegSettings;
986 if(!m_bIsActive) {
987 ALOGD("%s: First request on this channel starting stream",__func__);
988 //Stream on for main image. YUV buffer is queued to the kernel at the end of this call.
989 if(!pInputBuffer)
990 rc = start();
991 else
992 ALOGD("%s: Current request has input buffer no need to start h/w stream", __func__);
993 } else {
994 mStreams[0]->bufDone(0);
995 ALOGD("%s: Request on an existing stream",__func__);
996 }
997
998 if(rc != NO_ERROR) {
999 ALOGE("%s: Failed to start the stream on the request",__func__);
1000 return rc;
1001 }
1002
1003
1004 if(!mMemory) {
1005 if(pInputBuffer) {
1006 mMemory = new QCamera3GrallocMemory();
1007 if (mMemory == NULL) {
1008 return NO_MEMORY;
1009 }
1010
1011 //Registering Jpeg output buffer
1012 if (mMemory->registerBuffers(mNumBufs, mCamera3Buffers) < 0) {
1013 delete mMemory;
1014 mMemory = NULL;
1015 return NO_MEMORY;
1016 }
1017 } else {
1018 ALOGE("%s: error, Gralloc Memory object not yet created for this stream",__func__);
1019 return NO_MEMORY;
1020 }
1021 }
1022
1023 index = mMemory->getMatchBufIndex((void*)buffer);
1024 if(index < 0) {
1025 ALOGE("%s: Could not find object among registered buffers",__func__);
1026 return DEAD_OBJECT;
1027 }
1028 rc = mMemory->markFrameNumber(index, frameNumber);
1029
1030 //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
1031 mCurrentBufIndex = index;
1032
1033 m_postprocessor.start(mMemory, index, this);
1034
1035 ALOGD("%s: Post-process started", __func__);
1036 if(pInputBuffer) {
1037 ALOGD("%s: Issue call to reprocess", __func__);
1038 m_postprocessor.processAuxiliaryData(pInputBuffer,pInputChannel);
1039 }
1040 return rc;
1041 }
1042
1043 /*===========================================================================
1044 * FUNCTION : dataNotifyCB
1045 *
1046 * DESCRIPTION: Channel Level callback used for super buffer data notify.
1047 * This function is registered with mm-camera-interface to handle
1048 * data notify
1049 *
1050 * PARAMETERS :
1051 * @recvd_frame : stream frame received
1052 * userdata : user data ptr
1053 *
1054 * RETURN : none
1055 *==========================================================================*/
dataNotifyCB(mm_camera_super_buf_t * recvd_frame,void * userdata)1056 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
1057 void *userdata)
1058 {
1059 ALOGV("%s: E\n", __func__);
1060 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
1061
1062 if (channel == NULL) {
1063 ALOGE("%s: invalid channel pointer", __func__);
1064 return;
1065 }
1066
1067 if(channel->m_numStreams != 1) {
1068 ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
1069 return;
1070 }
1071
1072
1073 if(channel->mStreams[0] == NULL) {
1074 ALOGE("%s: Error: Invalid Stream object",__func__);
1075 return;
1076 }
1077
1078 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
1079
1080 ALOGV("%s: X\n", __func__);
1081 return;
1082 }
1083
1084
registerBuffers(uint32_t num_buffers,buffer_handle_t ** buffers)1085 int32_t QCamera3PicChannel::registerBuffers(uint32_t num_buffers,
1086 buffer_handle_t **buffers)
1087 {
1088 int rc = 0;
1089 cam_stream_type_t streamType;
1090 cam_format_t streamFormat;
1091
1092 ALOGV("%s: E",__func__);
1093 rc = QCamera3PicChannel::initialize();
1094 if (rc < 0) {
1095 ALOGE("%s: init failed", __func__);
1096 return rc;
1097 }
1098
1099 if (mCamera3Stream->format == HAL_PIXEL_FORMAT_BLOB) {
1100 streamType = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
1101 streamFormat = CAM_FORMAT_YUV_420_NV21;
1102 } else {
1103 //TODO: Fail for other types of streams for now
1104 ALOGE("%s: format is not BLOB", __func__);
1105 return -EINVAL;
1106 }
1107 /* Bookkeep buffer set because they go out of scope after register call */
1108 mNumBufs = num_buffers;
1109 mCamera3Buffers = new buffer_handle_t*[num_buffers];
1110 if (mCamera3Buffers == NULL) {
1111 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
1112 return -ENOMEM;
1113 }
1114 for (size_t i = 0; i < num_buffers; i++)
1115 mCamera3Buffers[i] = buffers[i];
1116
1117 ALOGV("%s: X",__func__);
1118 return rc;
1119 }
1120
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)1121 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1122 QCamera3Stream *stream)
1123 {
1124 //TODO
1125 //Used only for getting YUV. Jpeg callback will be sent back from channel
1126 //directly to HWI. Refer to func jpegEvtHandle
1127
1128 //Got the yuv callback. Calling yuv callback handler in PostProc
1129 uint8_t frameIndex;
1130 mm_camera_super_buf_t* frame = NULL;
1131 if(!super_frame) {
1132 ALOGE("%s: Invalid Super buffer",__func__);
1133 return;
1134 }
1135
1136 if(super_frame->num_bufs != 1) {
1137 ALOGE("%s: Multiple streams are not supported",__func__);
1138 return;
1139 }
1140 if(super_frame->bufs[0] == NULL ) {
1141 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
1142 __func__);
1143 return;
1144 }
1145
1146 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
1147 if(frameIndex >= mNumBufs) {
1148 ALOGE("%s: Error, Invalid index for buffer",__func__);
1149 if(stream) {
1150 stream->bufDone(frameIndex);
1151 }
1152 return;
1153 }
1154
1155 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1156 if (frame == NULL) {
1157 ALOGE("%s: Error allocating memory to save received_frame structure.",
1158 __func__);
1159 if(stream) {
1160 stream->bufDone(frameIndex);
1161 }
1162 return;
1163 }
1164 *frame = *super_frame;
1165
1166 m_postprocessor.processData(frame);
1167 free(super_frame);
1168 return;
1169 }
1170
getStreamBufs(uint32_t len)1171 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
1172 {
1173 int rc = 0;
1174
1175 if (mNumBufs == 0 || mCamera3Buffers == NULL) {
1176 ALOGE("%s: buffers not registered yet", __func__);
1177 return NULL;
1178 }
1179
1180 if(mMemory) {
1181 delete mMemory;
1182 mMemory = NULL;
1183 }
1184 mMemory = new QCamera3GrallocMemory();
1185 if (mMemory == NULL) {
1186 return NULL;
1187 }
1188
1189 //Registering Jpeg output buffer
1190 if (mMemory->registerBuffers(mNumBufs, mCamera3Buffers) < 0) {
1191 delete mMemory;
1192 mMemory = NULL;
1193 return NULL;
1194 }
1195
1196 mYuvMemory = new QCamera3HeapMemory();
1197 if (!mYuvMemory) {
1198 ALOGE("%s: unable to create metadata memory", __func__);
1199 return NULL;
1200 }
1201
1202 //Queue YUV buffers in the beginning mQueueAll = true
1203 rc = mYuvMemory->allocate(1, len, true);
1204 if (rc < 0) {
1205 ALOGE("%s: unable to allocate metadata memory", __func__);
1206 delete mYuvMemory;
1207 mYuvMemory = NULL;
1208 return NULL;
1209 }
1210 return mYuvMemory;
1211 }
1212
putStreamBufs()1213 void QCamera3PicChannel::putStreamBufs()
1214 {
1215 mMemory->unregisterBuffers();
1216 delete mMemory;
1217 mMemory = NULL;
1218
1219 mYuvMemory->deallocate();
1220 delete mYuvMemory;
1221 mYuvMemory = NULL;
1222 }
1223
isRawSnapshot()1224 bool QCamera3PicChannel::isRawSnapshot()
1225 {
1226 return !(mJpegSettings->is_jpeg_format);
1227 }
1228 /*===========================================================================
1229 * FUNCTION : getThumbnailSize
1230 *
1231 * DESCRIPTION: get user set thumbnail size
1232 *
1233 * PARAMETERS :
1234 * @dim : output of thumbnail dimension
1235 *
1236 * RETURN : none
1237 *==========================================================================*/
getThumbnailSize(cam_dimension_t & dim)1238 void QCamera3PicChannel::getThumbnailSize(cam_dimension_t &dim)
1239 {
1240 dim = mJpegSettings->thumbnail_size;
1241 }
1242
1243 /*===========================================================================
1244 * FUNCTION : getJpegQuality
1245 *
1246 * DESCRIPTION: get user set jpeg quality
1247 *
1248 * PARAMETERS : none
1249 *
1250 * RETURN : jpeg quality setting
1251 *==========================================================================*/
getJpegQuality()1252 int QCamera3PicChannel::getJpegQuality()
1253 {
1254 int quality = mJpegSettings->jpeg_quality;
1255 if (quality < 0) {
1256 quality = 85; //set to default quality value
1257 }
1258 return quality;
1259 }
1260
1261 /*===========================================================================
1262 * FUNCTION : getJpegRotation
1263 *
1264 * DESCRIPTION: get rotation information to be passed into jpeg encoding
1265 *
1266 * PARAMETERS : none
1267 *
1268 * RETURN : rotation information
1269 *==========================================================================*/
getJpegRotation()1270 int QCamera3PicChannel::getJpegRotation() {
1271 int rotation = mJpegSettings->jpeg_orientation;
1272 if (rotation < 0) {
1273 rotation = 0;
1274 }
1275 return rotation;
1276 }
1277
queueMetadata(mm_camera_super_buf_t * metadata_buf)1278 void QCamera3PicChannel::queueMetadata(mm_camera_super_buf_t *metadata_buf)
1279 {
1280 m_postprocessor.processPPMetadata(metadata_buf);
1281 }
1282 /*===========================================================================
1283 * FUNCTION : getRational
1284 *
1285 * DESCRIPTION: compose rational struct
1286 *
1287 * PARAMETERS :
1288 * @rat : ptr to struct to store rational info
1289 * @num :num of the rational
1290 * @denom : denom of the rational
1291 *
1292 * RETURN : int32_t type of status
1293 * NO_ERROR -- success
1294 * none-zero failure code
1295 *==========================================================================*/
getRational(rat_t * rat,int num,int denom)1296 int32_t getRational(rat_t *rat, int num, int denom)
1297 {
1298 if (NULL == rat) {
1299 ALOGE("%s: NULL rat input", __func__);
1300 return BAD_VALUE;
1301 }
1302 rat->num = num;
1303 rat->denom = denom;
1304 return NO_ERROR;
1305 }
1306
1307 /*===========================================================================
1308 * FUNCTION : getRational
1309 *
1310 * DESCRIPTION: compose rational struct
1311 *
1312 * PARAMETERS :
1313 * @rat : ptr to struct to store rational info
1314 * @num :num of the rational
1315 * @denom : denom of the rational
1316 *
1317 * RETURN : int32_t type of status
1318 * NO_ERROR -- success
1319 * none-zero failure code
1320 *==========================================================================*/
getRationalExposureTime(rat_t * rat,double num,double denom)1321 int32_t getRationalExposureTime(rat_t *rat, double num, double denom)
1322 {
1323 if (NULL == rat) {
1324 ALOGE("%s: NULL rat input", __func__);
1325 return BAD_VALUE;
1326 }
1327 rat->num = num;
1328 rat->denom = round(1.0 / denom);
1329 return NO_ERROR;
1330 }
1331
1332 /*===========================================================================
1333 * FUNCTION : parseGPSCoordinate
1334 *
1335 * DESCRIPTION: parse GPS coordinate string
1336 *
1337 * PARAMETERS :
1338 * @coord_str : [input] coordinate string
1339 * @coord : [output] ptr to struct to store coordinate
1340 *
1341 * RETURN : int32_t type of status
1342 * NO_ERROR -- success
1343 * none-zero failure code
1344 *==========================================================================*/
parseGPSCoordinate(const char * coord_str,rat_t * coord)1345 int parseGPSCoordinate(const char *coord_str, rat_t* coord)
1346 {
1347 if(coord == NULL) {
1348 ALOGE("%s: error, invalid argument coord == NULL", __func__);
1349 return BAD_VALUE;
1350 }
1351 float degF = atof(coord_str);
1352 if (degF < 0) {
1353 degF = -degF;
1354 }
1355 float minF = (degF - (int) degF) * 60;
1356 float secF = (minF - (int) minF) * 60;
1357
1358 getRational(&coord[0], (int)degF, 1);
1359 getRational(&coord[1], (int)minF, 1);
1360 getRational(&coord[2], (int)(secF * 10000), 10000);
1361 return NO_ERROR;
1362 }
1363
1364 /*===========================================================================
1365 * FUNCTION : getExifDateTime
1366 *
1367 * DESCRIPTION: query exif date time
1368 *
1369 * PARAMETERS :
1370 * @dateTime : string to store exif date time
1371 * @count : lenght of the dateTime string
1372 *
1373 * RETURN : int32_t type of status
1374 * NO_ERROR -- success
1375 * none-zero failure code
1376 *==========================================================================*/
getExifDateTime(char * dateTime,uint32_t & count)1377 int32_t getExifDateTime(char *dateTime, uint32_t &count)
1378 {
1379 //get time and date from system
1380 time_t rawtime;
1381 struct tm * timeinfo;
1382 time(&rawtime);
1383 timeinfo = localtime (&rawtime);
1384 //Write datetime according to EXIF Spec
1385 //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
1386 snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
1387 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
1388 timeinfo->tm_mday, timeinfo->tm_hour,
1389 timeinfo->tm_min, timeinfo->tm_sec);
1390 count = 20;
1391
1392 return NO_ERROR;
1393 }
1394
1395 /*===========================================================================
1396 * FUNCTION : getExifFocalLength
1397 *
1398 * DESCRIPTION: get exif focal lenght
1399 *
1400 * PARAMETERS :
1401 * @focalLength : ptr to rational strcut to store focal lenght
1402 *
1403 * RETURN : int32_t type of status
1404 * NO_ERROR -- success
1405 * none-zero failure code
1406 *==========================================================================*/
getExifFocalLength(rat_t * focalLength,float value)1407 int32_t getExifFocalLength(rat_t *focalLength, float value)
1408 {
1409 int focalLengthValue =
1410 (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
1411 return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
1412 }
1413
1414 /*===========================================================================
1415 * FUNCTION : getExifExpTimeInfo
1416 *
1417 * DESCRIPTION: get exif exposure time information
1418 *
1419 * PARAMETERS :
1420 * @expoTimeInfo : expousure time value
1421 * RETURN : nt32_t type of status
1422 * NO_ERROR -- success
1423 * none-zero failure code
1424 *==========================================================================*/
getExifExpTimeInfo(rat_t * expoTimeInfo,int64_t value)1425 int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
1426 {
1427
1428 float cal_exposureTime;
1429 if (value != 0)
1430 cal_exposureTime = (double)(value / 1000000000.0);
1431 else
1432 cal_exposureTime = 60.00;
1433 return getRationalExposureTime(expoTimeInfo, 1, cal_exposureTime);
1434 }
1435
1436 /*===========================================================================
1437 * FUNCTION : getExifGpsProcessingMethod
1438 *
1439 * DESCRIPTION: get GPS processing method
1440 *
1441 * PARAMETERS :
1442 * @gpsProcessingMethod : string to store GPS process method
1443 * @count : lenght of the string
1444 *
1445 * RETURN : int32_t type of status
1446 * NO_ERROR -- success
1447 * none-zero failure code
1448 *==========================================================================*/
getExifGpsProcessingMethod(char * gpsProcessingMethod,uint32_t & count,char * value)1449 int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
1450 uint32_t &count, char* value)
1451 {
1452 if(value != NULL) {
1453 memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
1454 count = EXIF_ASCII_PREFIX_SIZE;
1455 strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
1456 count += strlen(value);
1457 gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
1458 return NO_ERROR;
1459 } else {
1460 return BAD_VALUE;
1461 }
1462 }
1463
1464 /*===========================================================================
1465 * FUNCTION : getExifLatitude
1466 *
1467 * DESCRIPTION: get exif latitude
1468 *
1469 * PARAMETERS :
1470 * @latitude : ptr to rational struct to store latitude info
1471 * @ladRef : charater to indicate latitude reference
1472 *
1473 * RETURN : int32_t type of status
1474 * NO_ERROR -- success
1475 * none-zero failure code
1476 *==========================================================================*/
getExifLatitude(rat_t * latitude,char * latRef,double value)1477 int32_t getExifLatitude(rat_t *latitude,
1478 char *latRef, double value)
1479 {
1480 char str[30];
1481 snprintf(str, sizeof(str), "%f", value);
1482 parseGPSCoordinate(str, latitude);
1483
1484 //set Latitude Ref
1485 float latitudeValue = strtof(str, 0);
1486 if(latitudeValue < 0.0f) {
1487 latRef[0] = 'S';
1488 } else {
1489 latRef[0] = 'N';
1490 }
1491 latRef[1] = '\0';
1492 return NO_ERROR;
1493 }
1494
1495 /*===========================================================================
1496 * FUNCTION : getExifLongitude
1497 *
1498 * DESCRIPTION: get exif longitude
1499 *
1500 * PARAMETERS :
1501 * @longitude : ptr to rational struct to store longitude info
1502 * @lonRef : charater to indicate longitude reference
1503 *
1504 * RETURN : int32_t type of status
1505 * NO_ERROR -- success
1506 * none-zero failure code
1507 *==========================================================================*/
getExifLongitude(rat_t * longitude,char * lonRef,double value)1508 int32_t getExifLongitude(rat_t *longitude,
1509 char *lonRef, double value)
1510 {
1511 char str[30];
1512 snprintf(str, sizeof(str), "%f", value);
1513 parseGPSCoordinate(str, longitude);
1514
1515 //set Longitude Ref
1516 float longitudeValue = strtof(str, 0);
1517 if(longitudeValue < 0.0f) {
1518 lonRef[0] = 'W';
1519 } else {
1520 lonRef[0] = 'E';
1521 }
1522 lonRef[1] = '\0';
1523 return NO_ERROR;
1524 }
1525
1526 /*===========================================================================
1527 * FUNCTION : getExifAltitude
1528 *
1529 * DESCRIPTION: get exif altitude
1530 *
1531 * PARAMETERS :
1532 * @altitude : ptr to rational struct to store altitude info
1533 * @altRef : charater to indicate altitude reference
1534 *
1535 * RETURN : int32_t type of status
1536 * NO_ERROR -- success
1537 * none-zero failure code
1538 *==========================================================================*/
getExifAltitude(rat_t * altitude,char * altRef,double value)1539 int32_t getExifAltitude(rat_t *altitude,
1540 char *altRef, double value)
1541 {
1542 char str[30];
1543 snprintf(str, sizeof(str), "%f", value);
1544 value = atof(str);
1545 *altRef = 0;
1546 if(value < 0){
1547 *altRef = 1;
1548 value = -value;
1549 }
1550 return getRational(altitude, value*1000, 1000);
1551 }
1552
1553 /*===========================================================================
1554 * FUNCTION : getExifGpsDateTimeStamp
1555 *
1556 * DESCRIPTION: get exif GPS date time stamp
1557 *
1558 * PARAMETERS :
1559 * @gpsDateStamp : GPS date time stamp string
1560 * @bufLen : length of the string
1561 * @gpsTimeStamp : ptr to rational struct to store time stamp info
1562 *
1563 * RETURN : int32_t type of status
1564 * NO_ERROR -- success
1565 * none-zero failure code
1566 *==========================================================================*/
getExifGpsDateTimeStamp(char * gpsDateStamp,uint32_t bufLen,rat_t * gpsTimeStamp,int64_t value)1567 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
1568 uint32_t bufLen,
1569 rat_t *gpsTimeStamp, int64_t value)
1570 {
1571 char str[30];
1572 snprintf(str, sizeof(str), "%lld", value);
1573 time_t unixTime = (time_t)atol(str);
1574 struct tm *UTCTimestamp = gmtime(&unixTime);
1575
1576 strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
1577
1578 getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
1579 getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
1580 getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
1581
1582 return NO_ERROR;
1583 }
1584
getExifExposureValue(srat_t * exposure_val,int32_t exposure_comp,cam_rational_type_t step)1585 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
1586 cam_rational_type_t step)
1587 {
1588 exposure_val->num = exposure_comp * step.numerator;
1589 exposure_val->denom = step.denominator;
1590 return 0;
1591 }
1592 /*===========================================================================
1593 * FUNCTION : getExifData
1594 *
1595 * DESCRIPTION: get exif data to be passed into jpeg encoding
1596 *
1597 * PARAMETERS : none
1598 *
1599 * RETURN : exif data from user setting and GPS
1600 *==========================================================================*/
getExifData()1601 QCamera3Exif *QCamera3PicChannel::getExifData()
1602 {
1603 QCamera3Exif *exif = new QCamera3Exif();
1604 if (exif == NULL) {
1605 ALOGE("%s: No memory for QCamera3Exif", __func__);
1606 return NULL;
1607 }
1608
1609 int32_t rc = NO_ERROR;
1610 uint32_t count = 0;
1611
1612 // add exif entries
1613 char dateTime[20];
1614 memset(dateTime, 0, sizeof(dateTime));
1615 count = 20;
1616 rc = getExifDateTime(dateTime, count);
1617 if(rc == NO_ERROR) {
1618 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
1619 EXIF_ASCII,
1620 count,
1621 (void *)dateTime);
1622 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
1623 EXIF_ASCII,
1624 count,
1625 (void *)dateTime);
1626 } else {
1627 ALOGE("%s: getExifDateTime failed", __func__);
1628 }
1629
1630 rat_t focalLength;
1631 rc = getExifFocalLength(&focalLength, mJpegSettings->lens_focal_length);
1632 if (rc == NO_ERROR) {
1633 exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
1634 EXIF_RATIONAL,
1635 1,
1636 (void *)&(focalLength));
1637 } else {
1638 ALOGE("%s: getExifFocalLength failed", __func__);
1639 }
1640
1641 uint16_t isoSpeed = (uint16_t)mJpegSettings->sensor_sensitivity;
1642 if(isoSpeed == 0) {
1643 isoSpeed = (uint16_t)(mJpegSettings->lens_focal_length + 0.5)*100;
1644 }
1645
1646 exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
1647 EXIF_SHORT,
1648 1,
1649 (void *)&(isoSpeed));
1650
1651 rat_t sensorExpTime, temp;
1652 rc = getExifExpTimeInfo(&sensorExpTime, (int64_t)mJpegSettings->sensor_exposure_time);
1653 if(sensorExpTime.denom <= 0) {// avoid zero-divide problem
1654 sensorExpTime.denom = 0.01668; // expoure time will be 1/60 s
1655 uint16_t temp2 = (uint16_t)(sensorExpTime.denom <= 0 * 100000);
1656 temp2 = (uint16_t)(100000 / temp2);
1657 temp.num = 1;
1658 temp.denom = temp2;
1659 memcpy(&sensorExpTime, &temp, sizeof(sensorExpTime));
1660 }
1661 exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
1662 EXIF_LONG,
1663 1,
1664 (void *) &(sensorExpTime.denom));
1665
1666 if (strlen(mJpegSettings->gps_processing_method) > 0) {
1667 char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
1668 count = 0;
1669 rc = getExifGpsProcessingMethod(gpsProcessingMethod, count, mJpegSettings->gps_processing_method);
1670 if(rc == NO_ERROR) {
1671 exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
1672 EXIF_ASCII,
1673 count,
1674 (void *)gpsProcessingMethod);
1675 } else {
1676 ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
1677 }
1678 }
1679
1680 if (mJpegSettings->gps_coordinates[0]) {
1681 rat_t latitude[3];
1682 char latRef[2];
1683 rc = getExifLatitude(latitude, latRef, *(mJpegSettings->gps_coordinates[0]));
1684 if(rc == NO_ERROR) {
1685 exif->addEntry(EXIFTAGID_GPS_LATITUDE,
1686 EXIF_RATIONAL,
1687 3,
1688 (void *)latitude);
1689 exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
1690 EXIF_ASCII,
1691 2,
1692 (void *)latRef);
1693 } else {
1694 ALOGE("%s: getExifLatitude failed", __func__);
1695 }
1696 }
1697
1698 if (mJpegSettings->gps_coordinates[1]) {
1699 rat_t longitude[3];
1700 char lonRef[2];
1701 rc = getExifLongitude(longitude, lonRef, *(mJpegSettings->gps_coordinates[1]));
1702 if(rc == NO_ERROR) {
1703 exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
1704 EXIF_RATIONAL,
1705 3,
1706 (void *)longitude);
1707
1708 exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
1709 EXIF_ASCII,
1710 2,
1711 (void *)lonRef);
1712 } else {
1713 ALOGE("%s: getExifLongitude failed", __func__);
1714 }
1715 }
1716
1717 if (mJpegSettings->gps_coordinates[2]) {
1718 rat_t altitude;
1719 char altRef;
1720 rc = getExifAltitude(&altitude, &altRef, *(mJpegSettings->gps_coordinates[2]));
1721 if(rc == NO_ERROR) {
1722 exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
1723 EXIF_RATIONAL,
1724 1,
1725 (void *)&(altitude));
1726
1727 exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
1728 EXIF_BYTE,
1729 1,
1730 (void *)&altRef);
1731 } else {
1732 ALOGE("%s: getExifAltitude failed", __func__);
1733 }
1734 }
1735
1736 if (mJpegSettings->gps_timestamp) {
1737 char gpsDateStamp[20];
1738 rat_t gpsTimeStamp[3];
1739 rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp, *(mJpegSettings->gps_timestamp));
1740 if(rc == NO_ERROR) {
1741 exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
1742 EXIF_ASCII,
1743 strlen(gpsDateStamp) + 1,
1744 (void *)gpsDateStamp);
1745
1746 exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
1747 EXIF_RATIONAL,
1748 3,
1749 (void *)gpsTimeStamp);
1750 } else {
1751 ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
1752 }
1753 }
1754
1755 srat_t exposure_val;
1756 rc = getExifExposureValue(&exposure_val, mJpegSettings->exposure_compensation,
1757 mJpegSettings->exposure_comp_step);
1758 if(rc == NO_ERROR) {
1759 exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
1760 EXIF_SRATIONAL,
1761 1,
1762 (void *)(&exposure_val));
1763 } else {
1764 ALOGE("%s: getExifExposureValue failed ", __func__);
1765 }
1766
1767 char value[PROPERTY_VALUE_MAX];
1768 if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
1769 exif->addEntry(EXIFTAGID_MAKE,
1770 EXIF_ASCII,
1771 strlen(value) + 1,
1772 (void *)value);
1773 } else {
1774 ALOGE("%s: getExifMaker failed", __func__);
1775 }
1776
1777 if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
1778 exif->addEntry(EXIFTAGID_MODEL,
1779 EXIF_ASCII,
1780 strlen(value) + 1,
1781 (void *)value);
1782 } else {
1783 ALOGE("%s: getExifModel failed", __func__);
1784 }
1785
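// The f-number is scaled by F_NUMBER_DECIMAL_PRECISION to form a rational,
// e.g. with a precision of 100, f/2.8 becomes 280/100.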
1786 float f_number = mJpegSettings->f_number;
1787 rat_t aperture;
1788 // scale before truncating so the fractional part of the f-number is kept
1789 getRational(&aperture, (uint32_t)(f_number * F_NUMBER_DECIMAL_PRECISION), (uint32_t)F_NUMBER_DECIMAL_PRECISION);
1790 exif->addEntry(EXIFTAGID_APERTURE,
1791 EXIF_RATIONAL,
1792 1,
1793 (void *)&(aperture));
1794
1795 exif->addEntry(EXIFTAGID_F_NUMBER,
1796 EXIF_RATIONAL,
1797 1,
1798 (void *)&(aperture));
1799
1800 uint16_t flash = mJpegSettings->flash;
1801 exif->addEntry(EXIFTAGID_FLASH,
1802 EXIF_SHORT, 1,
1803 (void *)&(flash));
1804
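// EXIF WhiteBalance: 0 = auto, 1 = manual. Anything other than
// CAM_WB_MODE_AUTO is reported as manual.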
1805 int wb;
1806 short val_short;
1807 wb = mJpegSettings->wb;
1808 if(wb == CAM_WB_MODE_AUTO)
1809 val_short = 0;
1810 else
1811 val_short = 1;
1812
1813 exif->addEntry(EXIFTAGID_WHITE_BALANCE,
1814 EXIF_SHORT,
1815 1,
1816 (void *)&(val_short));
1817
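// The SubSecTime* tags carry the sub-second part of the capture time as a
// six-digit ASCII string (microseconds).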
1818 struct timeval tv;
1819 char subsecTime[7];
1820 gettimeofday(&tv, NULL);
1821 snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
1822
1823 exif->addEntry(EXIFTAGID_SUBSEC_TIME,
1824 EXIF_ASCII, 7,
1825 (void *)subsecTime);
1826
1827 exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
1828 EXIF_ASCII, 7,
1829 (void *)subsecTime);
1830
1831 exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
1832 EXIF_ASCII, 7,
1833 (void *)subsecTime);
1834
1835 return exif;
1836 }
1837
1838 int QCamera3PicChannel::kMaxBuffers = 2;
1839
1840 /*===========================================================================
1841 * FUNCTION : QCamera3ReprocessChannel
1842 *
1843 * DESCRIPTION: constructor of QCamera3ReprocessChannel
1844 *
1845 * PARAMETERS :
1846 * @cam_handle : camera handle
1847 * @cam_ops : ptr to camera ops table
1848 * @cb_routine : callback routine, @paddingInfo : padding info, @userData : user data ptr, @ch_hdl : handle of the owning picture channel
1849 *
1850 * RETURN : none
1851 *==========================================================================*/
1852 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
1853 mm_camera_ops_t *cam_ops,
1854 channel_cb_routine cb_routine,
1855 cam_padding_info_t *paddingInfo,
1856 void *userData, void *ch_hdl) :
1857 QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, userData),
1858 picChHandle(ch_hdl),
1859 m_pSrcChannel(NULL),
1860 m_pMetaChannel(NULL),
1861 m_metaFrame(NULL),
1862 mMemory(NULL)
1863 {
1864 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
1865 }
1866
1867
1868 /*===========================================================================
1869 * FUNCTION : initialize
1870 *
1871 * DESCRIPTION: initialize the reprocess channel with default mm-camera
1872 * channel attributes
1873 *
1874 * PARAMETERS : none
1875 *
1876 * RETURN : int32_t type of status
1877 * NO_ERROR -- success
1878 * non-zero failure code
1879 *==========================================================================*/
1880 int32_t QCamera3ReprocessChannel::initialize()
1881 {
1882 int32_t rc = NO_ERROR;
1883 mm_camera_channel_attr_t attr;
1884
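// Configure the channel to notify super buffers continuously, holding at most
// one unmatched frame.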
1885 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
1886 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
1887 attr.max_unmatched_frames = 1;
1888
1889 rc = init(&attr, NULL);
1890 if (rc < 0) {
1891 ALOGE("%s: init failed", __func__);
1892 }
1893 return rc;
1894 }
1895
1896
1897 /*===========================================================================
1898 * FUNCTION : streamCbRoutine
1899 *
1900 * DESCRIPTION: callback invoked when a reprocessed frame is available;
1901 * hands the frame to the postprocessor for JPEG encoding
1902 *
1903 * PARAMETERS :
1904 * @super_frame : super buffer containing the reprocessed frame
1905 * @stream : reprocess stream that produced the frame
1906 *
1907 * RETURN : none
1908 *==========================================================================*/
1909 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1910 QCamera3Stream *stream)
1911 {
1912 //Got the pproc data callback. Now send to jpeg encoding
1913 uint8_t frameIndex;
1914 mm_camera_super_buf_t* frame = NULL;
1915 QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;
1916
1917 if(!super_frame) {
1918 ALOGE("%s: Invalid Super buffer",__func__);
1919 return;
1920 }
1921
1922 if(super_frame->num_bufs != 1) {
1923 ALOGE("%s: Multiple streams are not supported",__func__);
1924 return;
1925 }
1926 if(super_frame->bufs[0] == NULL ) {
1927 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
1928 __func__);
1929 return;
1930 }
1931
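// Take a private copy of the super buffer so it outlives this callback;
// the postprocessor consumes it asynchronously.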
1932 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
1933 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1934 if (frame == NULL) {
1935 ALOGE("%s: Error allocating memory to save received_frame structure.",
1936 __func__);
1937 if(stream) {
1938 stream->bufDone(frameIndex);
1939 }
1940 return;
1941 }
1942 *frame = *super_frame;
1943 //queue back the metadata buffer
1944 if (m_metaFrame != NULL) {
1945 ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(m_metaFrame);
1946 free(m_metaFrame);
1947 m_metaFrame = NULL;
1948 } else {
1949 ALOGE("%s: Meta frame was NULL", __func__);
1950 }
1951 obj->m_postprocessor.processPPData(frame);
1952 return;
1953 }
1954
1955 /*===========================================================================
1956 * FUNCTION : QCamera3ReprocessChannel
1957 *
1958 * DESCRIPTION: default constructor of QCamera3ReprocessChannel
1959 *
1960 * PARAMETERS : none
1961 *
1962 * RETURN : none
1963 *==========================================================================*/
1964 QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
1965 m_pSrcChannel(NULL),
1966 m_pMetaChannel(NULL),
1967 m_metaFrame(NULL)
1968 {
1969 }
1970
1971 /*===========================================================================
1972 * FUNCTION : registerBuffers
1973 *
1974 * DESCRIPTION: register buffers with the reprocess channel (no-op)
1975 *
1976 * PARAMETERS :
1977 * @num_buffers : number of buffers; @buffers : buffer handles (unused)
1978 * RETURN : int32_t type of status, always 0 (success)
1979 *==========================================================================*/
1980 int32_t QCamera3ReprocessChannel::registerBuffers(uint32_t num_buffers, buffer_handle_t **buffers)
1981 {
1982 return 0;
1983 }
1984
1985 /*===========================================================================
1986 * FUNCTION : getStreamBufs
1987 *
1988 * DESCRIPTION: allocate heap buffers for the reprocess stream
1989 *
1990 * PARAMETERS :
1991 * @len : size (in bytes) of each buffer to allocate
1992 * RETURN : QCamera3Memory * -- allocated memory, NULL on failure
1993 *==========================================================================*/
1994 QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
1995 {
1996 int rc = 0;
1997
1998 mMemory = new QCamera3HeapMemory();
1999 if (!mMemory) {
2000 ALOGE("%s: unable to create reproc memory", __func__);
2001 return NULL;
2002 }
2003
2004 //Queue YUV buffers in the beginning mQueueAll = true
2005 rc = mMemory->allocate(2, len, true);
2006 if (rc < 0) {
2007 ALOGE("%s: unable to allocate reproc memory", __func__);
2008 delete mMemory;
2009 mMemory = NULL;
2010 return NULL;
2011 }
2012 return mMemory;
2013 }
2014
2015 /*===========================================================================
2016 * FUNCTION : putStreamBufs
2017 *
2018 * DESCRIPTION: release the heap buffers of the reprocess stream
2019 *
2020 * PARAMETERS : none
2021 *
2022 * RETURN : none
2023 *==========================================================================*/
2024 void QCamera3ReprocessChannel::putStreamBufs()
2025 {
2026 mMemory->deallocate();
2027 delete mMemory;
2028 mMemory = NULL;
2029 }
2030
2031 /*===========================================================================
2032 * FUNCTION : ~QCamera3ReprocessChannel
2033 *
2034 * DESCRIPTION: destructor of QCamera3ReprocessChannel
2035 *
2036 * PARAMETERS : none
2037 *
2038 * RETURN : none
2039 *==========================================================================*/
2040 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
2041 {
2042 }
2043
2044 /*===========================================================================
2045 * FUNCTION : getStreamBySourceHandle
2046 *
2047 * DESCRIPTION: find reprocess stream by its source stream handle
2048 *
2049 * PARAMETERS :
2050 * @srcHandle : source stream handle
2051 *
2052 * RETURN : ptr to reprocess stream if found. NULL if not found
2053 *==========================================================================*/
2054 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySourceHandle(uint32_t srcHandle)
2055 {
2056 QCamera3Stream *pStream = NULL;
2057
2058 for (int i = 0; i < m_numStreams; i++) {
2059 if (mSrcStreamHandles[i] == srcHandle) {
2060 pStream = mStreams[i];
2061 break;
2062 }
2063 }
2064 return pStream;
2065 }
2066
2067 /*===========================================================================
2068 * FUNCTION : doReprocess
2069 *
2070 * DESCRIPTION: request to do a reprocess on the frame
2071 *
2072 * PARAMETERS :
2073 * @frame : frame to be reprocessed
2074 * @meta_frame : corresponding metadata super buffer
2075 * RETURN : int32_t type of status
2076 * NO_ERROR -- success
2077 * non-zero failure code
2078 *==========================================================================*/
2079 int32_t QCamera3ReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
2080 mm_camera_super_buf_t *meta_frame)
2081 {
2082 int32_t rc = 0;
2083 if (m_numStreams < 1) {
2084 ALOGE("%s: No reprocess stream is created", __func__);
2085 return -1;
2086 }
2087 if (m_pSrcChannel == NULL) {
2088 ALOGE("%s: No source channel for reprocess", __func__);
2089 return -1;
2090 }
2091 m_metaFrame = meta_frame;
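// For each source buffer, find its reprocess stream and issue a
// DO_REPROCESS stream parameter, attaching the metadata buffer if present.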
2092 for (int i = 0; i < frame->num_bufs; i++) {
2093 QCamera3Stream *pStream = getStreamBySourceHandle(frame->bufs[i]->stream_id);
2094 if (pStream != NULL) {
2095 cam_stream_parm_buffer_t param;
2096 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2097 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2098 param.reprocess.buf_index = frame->bufs[i]->buf_idx;
2099 param.reprocess.frame_idx = frame->bufs[i]->frame_idx;
2100 if (meta_frame != NULL) {
2101 param.reprocess.meta_present = 1;
2102 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
2103 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
2104 }
2105 rc = pStream->setParameter(param);
2106 if (rc != NO_ERROR) {
2107 ALOGE("%s: stream setParameter for reprocess failed", __func__);
2108 break;
2109 }
2110 }
2111 }
2112 return rc;
2113 }
2114
2115 /*===========================================================================
2116 * FUNCTION : doReprocess
2117 *
2118 * DESCRIPTION: request to do a reprocess on the frame
2119 *
2120 * PARAMETERS :
2121 * @buf_fd : fd to the input buffer that needs reprocess
2122 * @buf_length : length of the input buffer
2123 * @ret_val : result of reprocess.
2124 * Example: Could be faceID in case of register face image.
2125 * @meta_frame : metadata super buffer for the reprocess
2126 * RETURN : int32_t type of status
2127 * NO_ERROR -- success
2128 * non-zero failure code
2129 *==========================================================================*/
2130 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
2131 uint32_t buf_length,
2132 int32_t &ret_val,
2133 mm_camera_super_buf_t *meta_frame)
2134 {
2135 int32_t rc = 0;
2136 if (m_numStreams < 1) {
2137 ALOGE("%s: No reprocess stream is created", __func__);
2138 return -1;
2139 }
2140 if (meta_frame == NULL) {
2141 ALOGE("%s: Did not get corresponding metadata in time", __func__);
2142 return -1;
2143 }
2144
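// Map the caller's buffer as an offline input on each reprocess stream,
// trigger the reprocess via a stream parameter, then unmap the buffer.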
2145 uint32_t buf_idx = 0;
2146 for (int i = 0; i < m_numStreams; i++) {
2147 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2148 buf_idx, -1,
2149 buf_fd, buf_length);
2150
2151 if (rc == NO_ERROR) {
2152 cam_stream_parm_buffer_t param;
2153 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2154 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2155 param.reprocess.buf_index = buf_idx;
2156 param.reprocess.meta_present = 1;
2157 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
2158 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
2159 rc = mStreams[i]->setParameter(param);
2160 if (rc == NO_ERROR) {
2161 ret_val = param.reprocess.ret_val;
2162 }
2163 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2164 buf_idx, -1);
2165 }
2166 }
2167 return rc;
2168 }
2169
2170 /*===========================================================================
2171 * FUNCTION : addReprocStreamsFromSource
2172 *
2173 * DESCRIPTION: add reprocess streams from input source channel
2174 *
2175 * PARAMETERS :
2176 * @config : pp feature configuration
2177 * @pSrcChannel : ptr to input source channel that needs reprocess
2178 * @pMetaChannel : ptr to metadata channel to get corresp. metadata
2179 *
2180 * RETURN : int32_t type of status
2181 * NO_ERROR -- success
2182 * non-zero failure code
2183 *==========================================================================*/
2184 int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &config,
2185 QCamera3Channel *pSrcChannel,
2186 QCamera3Channel *pMetaChannel)
2187 {
2188 int32_t rc = 0;
2189 QCamera3Stream *pSrcStream = pSrcChannel->getStreamByIndex(0);
2190 if (pSrcStream == NULL) {
2191 ALOGE("%s: source channel doesn't have a stream", __func__);
2192 return BAD_VALUE;
2193 }
2194 cam_stream_reproc_config_t reprocess_config;
2195 cam_dimension_t streamDim;
2196 cam_stream_type_t streamType;
2197 cam_format_t streamFormat;
2198 cam_frame_len_offset_t frameOffset;
2199 int num_buffers = 2;
2200
2201 streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
2202 pSrcStream->getFormat(streamFormat);
2203 pSrcStream->getFrameDimension(streamDim);
2204 pSrcStream->getFrameOffset(frameOffset);
2205
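// Online reprocess: the reprocess stream is fed directly from the source
// stream, identified here by its server-side stream ID and type.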
2206 reprocess_config.pp_type = CAM_ONLINE_REPROCESS_TYPE;
2207 reprocess_config.online.input_stream_id = pSrcStream->getMyServerID();
2208 reprocess_config.online.input_stream_type = pSrcStream->getMyType();
2209 reprocess_config.pp_feature_config = config;
2210
2211 mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
2212
2213 if (reprocess_config.pp_feature_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
2214 if (reprocess_config.pp_feature_config.rotation == ROTATE_90 ||
2215 reprocess_config.pp_feature_config.rotation == ROTATE_270) {
2216 // rotated by 90 or 270, need to switch width and height
2217 int32_t temp = streamDim.height;
2218 streamDim.height = streamDim.width;
2219 streamDim.width = temp;
2220 }
2221 }
2222
2223 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
2224 m_handle,
2225 m_camOps,
2226 mPaddingInfo,
2227 (QCamera3Channel*)this);
2228 if (pStream == NULL) {
2229 ALOGE("%s: No mem for Stream", __func__);
2230 return NO_MEMORY;
2231 }
2232
2233 rc = pStream->init(streamType, streamFormat, streamDim, &reprocess_config,
2234 num_buffers,QCamera3Channel::streamCbRoutine, this);
2235
2236
2237 if (rc == 0) {
2238 mStreams[m_numStreams] = pStream;
2239 m_numStreams++;
2240 } else {
2241 ALOGE("%s: failed to create reprocess stream", __func__);
2242 delete pStream;
2243 }
2244
2245 if (rc == NO_ERROR) {
2246 m_pSrcChannel = pSrcChannel;
2247 m_pMetaChannel = pMetaChannel;
2248 }
2249 if(m_camOps->request_super_buf(m_camHandle,m_handle,1) < 0) {
2250 ALOGE("%s: Request for super buffer failed",__func__);
2251 }
2252 return rc;
2253 }
2254
2255
2256 }; // namespace qcamera
2257