1 /* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3Channel"
32 //#define LOG_NDEBUG 0
33 #include <fcntl.h>
34 #include <stdlib.h>
35 #include <cstdlib>
36 #include <stdio.h>
37 #include <string.h>
38 #include <hardware/camera3.h>
39 #include <system/camera_metadata.h>
40 #include <gralloc_priv.h>
41 #include <utils/Log.h>
42 #include <utils/Errors.h>
43 #include <utils/Trace.h>
44 #include <cutils/properties.h>
45 #include "QCamera3Channel.h"
46 #include "QCamera3HWI.h"
47 
48 using namespace android;
49 
50 #define MIN_STREAMING_BUFFER_NUM (7 + 11)
51 
52 namespace qcamera {
53 static const char ExifAsciiPrefix[] =
54     { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
55 static const char ExifUndefinedPrefix[] =
56     { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
57 
58 #define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
59 #define FOCAL_LENGTH_DECIMAL_PRECISION   100
60 
61 #define VIDEO_FORMAT    CAM_FORMAT_YUV_420_NV12
62 #define SNAPSHOT_FORMAT CAM_FORMAT_YUV_420_NV21
63 #define PREVIEW_FORMAT  CAM_FORMAT_YUV_420_NV21
64 #define DEFAULT_FORMAT  CAM_FORMAT_YUV_420_NV21
65 #define CALLBACK_FORMAT CAM_FORMAT_YUV_420_NV21
66 #define RAW_FORMAT      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
67 
68 /*===========================================================================
69  * FUNCTION   : QCamera3Channel
70  *
71  * DESCRIPTION: constructor of QCamera3Channel
72  *
73  * PARAMETERS :
74  *   @cam_handle : camera handle
75  *   @cam_ops    : ptr to camera ops table
76  *
77  * RETURN     : none
78  *==========================================================================*/
79 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
80                                mm_camera_ops_t *cam_ops,
81                                channel_cb_routine cb_routine,
82                                cam_padding_info_t *paddingInfo,
83                                uint32_t postprocess_mask,
84                                void *userData)
85 {
86     m_camHandle = cam_handle;
87     m_camOps = cam_ops;
88     m_bIsActive = false;
89 
90     m_handle = 0;
91     m_numStreams = 0;
92     memset(mStreams, 0, sizeof(mStreams));
93     mUserData = userData;
94 
95     mStreamInfoBuf = NULL;
96     mChannelCB = cb_routine;
97     mPaddingInfo = paddingInfo;
98 
99     mPostProcMask = postprocess_mask;
100 
101     char prop[PROPERTY_VALUE_MAX];
102     property_get("persist.camera.yuv.dump", prop, "0");
103     mYUVDump = atoi(prop);
104     mIsType = IS_TYPE_NONE;
105 }
106 
107 /*===========================================================================
108  * FUNCTION   : QCamera3Channel
109  *
110  * DESCRIPTION: default constructor of QCamera3Channel
111  *
112  * PARAMETERS : none
113  *
114  * RETURN     : none
115  *==========================================================================*/
116 QCamera3Channel::QCamera3Channel()
117 {
118     m_camHandle = 0;
119     m_camOps = NULL;
120     m_bIsActive = false;
121 
122     m_handle = 0;
123     m_numStreams = 0;
124     memset(mStreams, 0, sizeof(mStreams));
125     mUserData = NULL;
126 
127     mStreamInfoBuf = NULL;
128     mChannelCB = NULL;
129     mPaddingInfo = NULL;
130 
131     mPostProcMask = 0;
132 }
133 
134 /*===========================================================================
135  * FUNCTION   : ~QCamera3Channel
136  *
137  * DESCRIPTION: destructor of QCamera3Channel
138  *
139  * PARAMETERS : none
140  *
141  * RETURN     : none
142  *==========================================================================*/
143 QCamera3Channel::~QCamera3Channel()
144 {
145     if (m_bIsActive)
146         stop();
147 
148     for (int i = 0; i < m_numStreams; i++) {
149         if (mStreams[i] != NULL) {
150             delete mStreams[i];
151             mStreams[i] = 0;
152         }
153     }
154     if (m_handle) {
155         m_camOps->delete_channel(m_camHandle, m_handle);
156         ALOGE("%s: deleting channel %d", __func__, m_handle);
157         m_handle = 0;
158     }
159     m_numStreams = 0;
160 }
161 
162 /*===========================================================================
163  * FUNCTION   : init
164  *
165  * DESCRIPTION: initialization of channel
166  *
167  * PARAMETERS :
168  *   @attr    : channel bundle attribute setting
169  *   @dataCB  : data notify callback
170  *   @userData: user data ptr
171  *
172  * RETURN     : int32_t type of status
173  *              NO_ERROR  -- success
174  *              non-zero failure code
175  *==========================================================================*/
176 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
177                              mm_camera_buf_notify_t dataCB)
178 {
179     m_handle = m_camOps->add_channel(m_camHandle,
180                                       attr,
181                                       dataCB,
182                                       this);
183     if (m_handle == 0) {
184         ALOGE("%s: Add channel failed", __func__);
185         return UNKNOWN_ERROR;
186     }
187     return NO_ERROR;
188 }
189 
190 /*===========================================================================
191  * FUNCTION   : addStream
192  *
193  * DESCRIPTION: add a stream into channel
194  *
195  * PARAMETERS :
196  *   @streamDim      : dimensions of the stream
197  *   @streamRotation : rotation of the stream
198  *   @minStreamBufNum: number of stream buffers needed
199  *
200  * RETURN     : int32_t type of status
201  *              NO_ERROR  -- success
202  *              non-zero failure code
203  *==========================================================================*/
204 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
205                                   cam_format_t streamFormat,
206                                   cam_dimension_t streamDim,
207                                   cam_rotation_t streamRotation,
208                                   uint8_t minStreamBufNum,
209                                   uint32_t postprocessMask,
210                                   cam_is_type_t isType)
211 {
212     int32_t rc = NO_ERROR;
213 
214     if (m_numStreams >= 1) {
215         ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
216         return BAD_VALUE;
217     }
218 
219     if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
220         ALOGE("%s: stream number (%d) exceeds max limit (%d)",
221               __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
222         return BAD_VALUE;
223     }
224     QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
225                                                m_handle,
226                                                m_camOps,
227                                                mPaddingInfo,
228                                                this);
229     if (pStream == NULL) {
230         ALOGE("%s: No mem for Stream", __func__);
231         return NO_MEMORY;
232     }
233 
234     rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
235             NULL, minStreamBufNum, postprocessMask, isType, streamCbRoutine,
236             this);
237     if (rc == 0) {
238         mStreams[m_numStreams] = pStream;
239         m_numStreams++;
240     } else {
241         delete pStream;
242     }
243     return rc;
244 }
245 
246 /*===========================================================================
247  * FUNCTION   : start
248  *
249  * DESCRIPTION: start channel, which will start all streams belonging to this channel
250  *
251  * PARAMETERS :
252  *
253  * RETURN     : int32_t type of status
254  *              NO_ERROR  -- success
255  *              non-zero failure code
256  *==========================================================================*/
257 int32_t QCamera3Channel::start()
258 {
259     ATRACE_CALL();
260     int32_t rc = NO_ERROR;
261 
262     if (m_numStreams > 1) {
263         ALOGE("%s: bundle not supported", __func__);
264     } else if (m_numStreams == 0) {
265         return NO_INIT;
266     }
267 
268     if(m_bIsActive) {
269         ALOGD("%s: Attempt to start active channel", __func__);
270         return rc;
271     }
272 
273     for (int i = 0; i < m_numStreams; i++) {
274         if (mStreams[i] != NULL) {
275             mStreams[i]->start();
276         }
277     }
278     rc = m_camOps->start_channel(m_camHandle, m_handle);
279 
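    // If the channel itself failed to start, roll back the streams that were already started.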
280     if (rc != NO_ERROR) {
281         for (int i = 0; i < m_numStreams; i++) {
282             if (mStreams[i] != NULL) {
283                 mStreams[i]->stop();
284             }
285         }
286     } else {
287         m_bIsActive = true;
288     }
289 
290     return rc;
291 }
292 
293 /*===========================================================================
294  * FUNCTION   : stop
295  *
296  * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
297  *
298  * PARAMETERS : none
299  *
300  * RETURN     : int32_t type of status
301  *              NO_ERROR  -- success
302  *              non-zero failure code
303  *==========================================================================*/
304 int32_t QCamera3Channel::stop()
305 {
306     ATRACE_CALL();
307     int32_t rc = NO_ERROR;
308     if(!m_bIsActive) {
309         ALOGE("%s: Attempt to stop inactive channel",__func__);
310         return rc;
311     }
312 
313     for (int i = 0; i < m_numStreams; i++) {
314         if (mStreams[i] != NULL) {
315             mStreams[i]->stop();
316         }
317     }
318 
319     rc = m_camOps->stop_channel(m_camHandle, m_handle);
320 
321     m_bIsActive = false;
322     return rc;
323 }
324 
325 /*===========================================================================
326  * FUNCTION   : bufDone
327  *
328  * DESCRIPTION: return a stream buf back to kernel
329  *
330  * PARAMETERS :
331  *   @recvd_frame  : stream buf frame to be returned
332  *
333  * RETURN     : int32_t type of status
334  *              NO_ERROR  -- success
335  *              non-zero failure code
336  *==========================================================================*/
337 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
338 {
339     int32_t rc = NO_ERROR;
340     for (int i = 0; i < recvd_frame->num_bufs; i++) {
341          if (recvd_frame->bufs[i] != NULL) {
342              for (int j = 0; j < m_numStreams; j++) {
343                  if (mStreams[j] != NULL &&
344                      mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
345                      rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
346                      break; // break loop j
347                  }
348              }
349          }
350     }
351 
352     return rc;
353 }
354 
355 /*===========================================================================
356  * FUNCTION   : getStreamTypeMask
357  *
358  * DESCRIPTION: Get bit mask of all stream types in this channel
359  *
360  * PARAMETERS : None
361  *
362  * RETURN     : Bit mask of all stream types in this channel
363  *==========================================================================*/
364 uint32_t QCamera3Channel::getStreamTypeMask()
365 {
366     uint32_t mask = 0;
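    // Each stream contributes the bit whose position equals its cam_stream_type_t value.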
367     for (int i = 0; i < m_numStreams; i++) {
368        mask |= (0x1 << mStreams[i]->getMyType());
369     }
370     return mask;
371 }
372 
373 /*===========================================================================
374  * FUNCTION   : getStreamID
375  *
376  * DESCRIPTION: Get StreamID of requested stream type
377  *
378  * PARAMETERS : streamMask
379  *
380  * RETURN     : Stream ID
381  *==========================================================================*/
382 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
383 {
384     uint32_t streamID = 0;
385     for (int i = 0; i < m_numStreams; i++) {
386         if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
387             streamID = mStreams[i]->getMyServerID();
388             break;
389         }
390     }
391     return streamID;
392 }
393 
394 /*===========================================================================
395  * FUNCTION   : getStreamByHandle
396  *
397  * DESCRIPTION: return stream object by stream handle
398  *
399  * PARAMETERS :
400  *   @streamHandle : stream handle
401  *
402  * RETURN     : stream object. NULL if not found
403  *==========================================================================*/
404 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
405 {
406     for (int i = 0; i < m_numStreams; i++) {
407         if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
408             return mStreams[i];
409         }
410     }
411     return NULL;
412 }
413 
414 /*===========================================================================
415  * FUNCTION   : getStreamByIndex
416  *
417  * DESCRIPTION: return stream object by index
418  *
419  * PARAMETERS :
420  *   @index : index of the stream within the channel
421  *
422  * RETURN     : stream object. NULL if not found
423  *==========================================================================*/
424 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
425 {
426     if (index < m_numStreams) {
427         return mStreams[index];
428     }
429     return NULL;
430 }
431 
432 /*===========================================================================
433  * FUNCTION   : streamCbRoutine
434  *
435  * DESCRIPTION: callback routine for stream
436  *
437  * PARAMETERS :
438  *   @super_frame : super buffer; @stream : stream object; @userdata : channel cookie
439  *
440  * RETURN     : none
441  *==========================================================================*/
442 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
443                 QCamera3Stream *stream, void *userdata)
444 {
445     QCamera3Channel *channel = (QCamera3Channel *)userdata;
446     if (channel == NULL) {
447         ALOGE("%s: invalid channel pointer", __func__);
448         return;
449     }
450     channel->streamCbRoutine(super_frame, stream);
451 }
452 
453 /*===========================================================================
454  * FUNCTION   : dumpYUV
455  *
456  * DESCRIPTION: function to dump the YUV data from ISP/pproc
457  *
458  * PARAMETERS :
459  *   @frame   : frame to be dumped
460  *   @dim     : dimension of the stream
461  *   @offset  : offset of the data
462  *   @name    : 1 if it is ISP output/pproc input, 2 if it is pproc output
463  *
464  * RETURN  :
465  *==========================================================================*/
466 void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
467                               cam_frame_len_offset_t offset, uint8_t name)
468 {
469    char buf[64];
470    memset(buf, 0, sizeof(buf));
471    static int counter = 0;
472    /* Note that the image dimension will be the unrotated stream dimension.
473     * If you feel that the image would have been rotated during reprocess
474     * then swap the dimensions while opening the file
475     * */
476    snprintf(buf, sizeof(buf), "/data/local/tmp/%d_%d_%d_%dx%d.yuv",
477             name, counter, frame->frame_idx, dim.width, dim.height);
478    counter++;
479    int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
480    if (file_fd >= 0) {
481       int written_len = write(file_fd, frame->buffer, offset.frame_len);
482       ALOGE("%s: written number of bytes %d", __func__, written_len);
483       close(file_fd);
484    } else {
485       ALOGE("%s: failed to open file to dump image", __func__);
486    }
487 
488 }
489 
490 /*===========================================================================
491  * FUNCTION   : QCamera3RegularChannel
492  *
493  * DESCRIPTION: constructor of QCamera3RegularChannel
494  *
495  * PARAMETERS :
496  *   @cam_handle : camera handle
497  *   @cam_ops    : ptr to camera ops table
498  *   @cb_routine : callback routine to frame aggregator
499  *   @stream     : camera3_stream_t structure
500  *   @stream_type: Channel stream type
501  *
502  * RETURN     : none
503  *==========================================================================*/
504 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
505                     mm_camera_ops_t *cam_ops,
506                     channel_cb_routine cb_routine,
507                     cam_padding_info_t *paddingInfo,
508                     void *userData,
509                     camera3_stream_t *stream,
510                     cam_stream_type_t stream_type,
511                     uint32_t postprocess_mask) :
512                         QCamera3Channel(cam_handle, cam_ops, cb_routine,
513                                 paddingInfo, postprocess_mask, userData),
514                         mCamera3Stream(stream),
515                         mNumBufs(0),
516                         mStreamType(stream_type),
517                         mRotation(ROTATE_0)
518 {
519 }
520 
521 /*===========================================================================
522  * FUNCTION   : ~QCamera3RegularChannel
523  *
524  * DESCRIPTION: destructor of QCamera3RegularChannel
525  *
526  * PARAMETERS : none
527  *
528  * RETURN     : none
529  *==========================================================================*/
530 QCamera3RegularChannel::~QCamera3RegularChannel()
531 {
532 }
533 
534 /*===========================================================================
535  * FUNCTION   : initialize
536  *
537  * DESCRIPTION: Initialize and add camera channel & stream
538  *
539  * PARAMETERS :
540  *
541  * RETURN     : int32_t type of status
542  *              NO_ERROR  -- success
543  *              non-zero failure code
544  *==========================================================================*/
545 
546 int32_t QCamera3RawChannel::initialize(cam_is_type_t isType)
547 {
548     return QCamera3RegularChannel::initialize(isType);
549 }
550 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
551 {
552     ATRACE_CALL();
553     int32_t rc = NO_ERROR;
554     cam_format_t streamFormat;
555     cam_dimension_t streamDim;
556 
557     if (NULL == mCamera3Stream) {
558         ALOGE("%s: Camera stream uninitialized", __func__);
559         return NO_INIT;
560     }
561 
562     if (1 <= m_numStreams) {
563         // Only one stream per channel supported in v3 Hal
564         return NO_ERROR;
565     }
566 
567     rc = init(NULL, NULL);
568     if (rc < 0) {
569         ALOGE("%s: init failed", __func__);
570         return rc;
571     }
572 
573     mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
574     mIsType  = isType;
575 
576     if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
577         if (mStreamType ==  CAM_STREAM_TYPE_VIDEO) {
578             streamFormat = VIDEO_FORMAT;
579         } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
580             streamFormat = PREVIEW_FORMAT;
581         } else {
582             //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
583             // to be properly aligned and padded.
584             streamFormat = DEFAULT_FORMAT;
585         }
586     } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
587          streamFormat = CALLBACK_FORMAT;
588     } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
589          mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW10 ||
590          mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
591          // Bayer pattern doesn't matter here.
592          // All CAMIF raw formats use 10 bits.
593          streamFormat = RAW_FORMAT;
594     } else {
595         //TODO: Fail for other types of streams for now
596         ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
597         return -EINVAL;
598     }
599 
600     if ((mStreamType == CAM_STREAM_TYPE_VIDEO) ||
601             (mStreamType == CAM_STREAM_TYPE_PREVIEW)) {
602         if ((mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) &&
603                 ((mPostProcMask & CAM_QCOM_FEATURE_ROTATION) == 0)) {
604             ALOGE("%s: attempting rotation %d when rotation is disabled",
605                     __func__,
606                     mCamera3Stream->rotation);
607             return -EINVAL;
608         }
609 
610         switch (mCamera3Stream->rotation) {
611         case CAMERA3_STREAM_ROTATION_0:
612             mRotation = ROTATE_0;
613             break;
614         case CAMERA3_STREAM_ROTATION_90: {
615             mRotation = ROTATE_90;
616             break;
617         }
618         case CAMERA3_STREAM_ROTATION_180:
619             mRotation = ROTATE_180;
620             break;
621         case CAMERA3_STREAM_ROTATION_270: {
622             mRotation = ROTATE_270;
623             break;
624         }
625         default:
626             ALOGE("%s: Unknown rotation: %d",
627                     __func__,
628                     mCamera3Stream->rotation);
629             return -EINVAL;
630         }
631     } else if (mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) {
632         ALOGE("%s: Rotation %d is not supported by stream type %d",
633                 __func__,
634                 mCamera3Stream->rotation,
635                 mStreamType);
636         return -EINVAL;
637     }
638 
639     streamDim.width = mCamera3Stream->width;
640     streamDim.height = mCamera3Stream->height;
641 
642     rc = QCamera3Channel::addStream(mStreamType,
643             streamFormat,
644             streamDim,
645             mRotation,
646             mNumBufs,
647             mPostProcMask,
648             mIsType);
649 
650     return rc;
651 }
652 
653 /*===========================================================================
654 * FUNCTION   : start
655 *
656 * DESCRIPTION: start a regular channel
657 *
658 * PARAMETERS :
659 *
660 * RETURN     : int32_t type of status
661 *              NO_ERROR  -- success
662 *              non-zero failure code
663 *==========================================================================*/
664 int32_t QCamera3RegularChannel::start()
665 {
666     ATRACE_CALL();
667     int32_t rc = NO_ERROR;
668 
669     if (0 < mMemory.getCnt()) {
670         rc = QCamera3Channel::start();
671     }
672     return rc;
673 }
674 
675 /*===========================================================================
676  * FUNCTION   : request
677  *
678  * DESCRIPTION: process a request from camera service. Stream on if necessary.
679  *
680  * PARAMETERS :
681  *   @buffer  : buffer to be filled for this request
682  *
683  * RETURN     : 0 on a success start of capture
684  *              -EINVAL on invalid input
685  *              -ENODEV on serious error
686  *==========================================================================*/
687 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
688 {
689     ATRACE_CALL();
690     //FIX ME: Return buffer back in case of failures below.
691 
692     int32_t rc = NO_ERROR;
693     int index;
694 
695     if (NULL == buffer) {
696         ALOGE("%s: Invalid buffer in channel request", __func__);
697         return BAD_VALUE;
698     }
699 
700     if(!m_bIsActive) {
701         rc = registerBuffer(buffer, mIsType);
702         if (NO_ERROR != rc) {
703             ALOGE("%s: On-the-fly buffer registration failed %d",
704                     __func__, rc);
705             return rc;
706         }
707 
708         rc = start();
709         if (NO_ERROR != rc) {
710             return rc;
711         }
712     } else {
713         CDBG("%s: Request on an existing stream",__func__);
714     }
715 
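    // Look the buffer up among the registered ones; if it is a new handle, register it on the fly.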
716     index = mMemory.getMatchBufIndex((void*)buffer);
717     if(index < 0) {
718         rc = registerBuffer(buffer, mIsType);
719         if (NO_ERROR != rc) {
720             ALOGE("%s: On-the-fly buffer registration failed %d",
721                     __func__, rc);
722             return rc;
723         }
724 
725         index = mMemory.getMatchBufIndex((void*)buffer);
726         if (index < 0) {
727             ALOGE("%s: Could not find object among registered buffers",
728                     __func__);
729             return DEAD_OBJECT;
730         }
731     }
732 
733     rc = mStreams[0]->bufDone(index);
734     if(rc != NO_ERROR) {
735         ALOGE("%s: Failed to Q new buffer to stream",__func__);
736         return rc;
737     }
738 
739     rc = mMemory.markFrameNumber(index, frameNumber);
740     return rc;
741 }
742 
743 /*===========================================================================
744  * FUNCTION   : registerBuffer
745  *
746  * DESCRIPTION: register streaming buffer to the channel object
747  *
748  * PARAMETERS :
749  *   @buffer     : buffer to be registered
750  *
751  * RETURN     : int32_t type of status
752  *              NO_ERROR  -- success
753  *              non-zero failure code
754  *==========================================================================*/
755 int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer,
756         cam_is_type_t isType)
757 {
758     ATRACE_CALL();
759     int rc = 0;
760     mIsType = isType;
761     cam_stream_type_t streamType;
762 
763     if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
764         ALOGE("%s: Trying to register more buffers than initially requested",
765                 __func__);
766         return BAD_VALUE;
767     }
768 
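    // Lazily create the channel's underlying stream the first time a buffer is registered.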
769     if (0 == m_numStreams) {
770         rc = initialize(mIsType);
771         if (rc != NO_ERROR) {
772             ALOGE("%s: Couldn't initialize camera stream %d",
773                     __func__, rc);
774             return rc;
775         }
776     }
777 
778     streamType = mStreams[0]->getMyType();
779     rc = mMemory.registerBuffer(buffer, streamType);
780     if (ALREADY_EXISTS == rc) {
781         return NO_ERROR;
782     } else if (NO_ERROR != rc) {
783         ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
784         return rc;
785     }
786 
787     return rc;
788 }
789 
790 void QCamera3RegularChannel::streamCbRoutine(
791                             mm_camera_super_buf_t *super_frame,
792                             QCamera3Stream *stream)
793 {
794     ATRACE_CALL();
795     //FIXME Q Buf back in case of error?
796     uint8_t frameIndex;
797     buffer_handle_t *resultBuffer;
798     int32_t resultFrameNumber;
799     camera3_stream_buffer_t result;
800 
801     if (NULL == stream) {
802         ALOGE("%s: Invalid stream", __func__);
803         return;
804     }
805 
806     if(!super_frame) {
807          ALOGE("%s: Invalid Super buffer",__func__);
808          return;
809     }
810 
811     if(super_frame->num_bufs != 1) {
812          ALOGE("%s: Multiple streams are not supported",__func__);
813          return;
814     }
815     if(super_frame->bufs[0] == NULL ) {
816          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
817                   __func__);
818          return;
819     }
820 
821     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
822     if(frameIndex >= mNumBufs) {
823          ALOGE("%s: Error, Invalid index for buffer",__func__);
824          stream->bufDone(frameIndex);
825          return;
826     }
827 
828     // Use the data below to issue the framework callback
829     resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
830     resultFrameNumber = mMemory.getFrameNumber(frameIndex);
831 
832     result.stream = mCamera3Stream;
833     result.buffer = resultBuffer;
834     result.status = CAMERA3_BUFFER_STATUS_OK;
835     result.acquire_fence = -1;
836     result.release_fence = -1;
837     int32_t rc = stream->bufRelease(frameIndex);
838     if (NO_ERROR != rc) {
839         ALOGE("%s: Error %d releasing stream buffer %d",
840                 __func__, rc, frameIndex);
841     }
842 
843     rc = mMemory.unregisterBuffer(frameIndex);
844     if (NO_ERROR != rc) {
845         ALOGE("%s: Error %d unregistering stream buffer %d",
846                 __func__, rc, frameIndex);
847     }
848 
849     if (0 <= resultFrameNumber){
850         mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, mUserData);
851     } else {
852         ALOGE("%s: Bad frame number", __func__);
853     }
854 
855     free(super_frame);
856     return;
857 }
858 
859 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
860 {
861     return &mMemory;
862 }
863 
864 void QCamera3RegularChannel::putStreamBufs()
865 {
866     mMemory.unregisterBuffers();
867 }
868 
869 int QCamera3RegularChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
870 
871 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
872                     mm_camera_ops_t *cam_ops,
873                     channel_cb_routine cb_routine,
874                     cam_padding_info_t *paddingInfo,
875                     uint32_t postprocess_mask,
876                     void *userData) :
877                         QCamera3Channel(cam_handle, cam_ops,
878                                 cb_routine, paddingInfo, postprocess_mask, userData),
879                         mMemory(NULL)
880 {
881 }
882 
883 QCamera3MetadataChannel::~QCamera3MetadataChannel()
884 {
885     if (m_bIsActive)
886         stop();
887 
888     if (mMemory) {
889         mMemory->deallocate();
890         delete mMemory;
891         mMemory = NULL;
892     }
893 }
894 
895 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
896 {
897     ATRACE_CALL();
898     int32_t rc;
899     cam_dimension_t streamDim;
900 
901     if (mMemory || m_numStreams > 0) {
902         ALOGE("%s: metadata channel already initialized", __func__);
903         return -EINVAL;
904     }
905 
906     rc = init(NULL, NULL);
907     if (rc < 0) {
908         ALOGE("%s: init failed", __func__);
909         return rc;
910     }
911     mIsType = isType;
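    // The metadata stream is modeled as a single-row buffer whose width is the
    // size of metadata_buffer_t.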
912     streamDim.width = sizeof(metadata_buffer_t);
913     streamDim.height = 1;
914     rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
915             streamDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM, mPostProcMask,
916             mIsType);
917     if (rc < 0) {
918         ALOGE("%s: addStream failed", __func__);
919     }
920     return rc;
921 }
922 
923 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
924                                                 uint32_t /*frameNumber*/)
925 {
926     if (!m_bIsActive) {
927         return start();
928     }
929     else
930         return 0;
931 }
932 
933 void QCamera3MetadataChannel::streamCbRoutine(
934                         mm_camera_super_buf_t *super_frame,
935                         QCamera3Stream * /*stream*/)
936 {
937     ATRACE_CALL();
938     uint32_t requestNumber = 0;
939     if (super_frame == NULL || super_frame->num_bufs != 1) {
940         ALOGE("%s: super_frame is not valid", __func__);
941         return;
942     }
943     mChannelCB(super_frame, NULL, requestNumber, mUserData);
944 }
945 
946 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
947 {
948     int rc;
949     if (len < sizeof(metadata_buffer_t)) {
950         ALOGE("%s: Metadata buffer size less than structure %u vs %zu",
951                 __func__,
952                 len,
953                 sizeof(metadata_buffer_t));
954         return NULL;
955     }
956     mMemory = new QCamera3HeapMemory();
957     if (!mMemory) {
958         ALOGE("%s: unable to create metadata memory", __func__);
959         return NULL;
960     }
961     rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
962     if (rc < 0) {
963         ALOGE("%s: unable to allocate metadata memory", __func__);
964         delete mMemory;
965         mMemory = NULL;
966         return NULL;
967     }
968     memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
969     return mMemory;
970 }
971 
972 void QCamera3MetadataChannel::putStreamBufs()
973 {
974     mMemory->deallocate();
975     delete mMemory;
976     mMemory = NULL;
977 }
978 /*************************************************************************************/
979 // RAW Channel related functions
980 int QCamera3RawChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
981 
982 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
983                     mm_camera_ops_t *cam_ops,
984                     channel_cb_routine cb_routine,
985                     cam_padding_info_t *paddingInfo,
986                     void *userData,
987                     camera3_stream_t *stream,
988                     uint32_t postprocess_mask,
989                     bool raw_16) :
990                         QCamera3RegularChannel(cam_handle, cam_ops,
991                                 cb_routine, paddingInfo, userData, stream,
992                                 CAM_STREAM_TYPE_RAW, postprocess_mask),
993                         mIsRaw16(raw_16)
994 {
995     char prop[PROPERTY_VALUE_MAX];
996     property_get("persist.camera.raw.debug.dump", prop, "0");
997     mRawDump = atoi(prop);
998 }
999 
1000 QCamera3RawChannel::~QCamera3RawChannel()
1001 {
1002 }
1003 
1004 void QCamera3RawChannel::streamCbRoutine(
1005                         mm_camera_super_buf_t *super_frame,
1006                         QCamera3Stream * stream)
1007 {
1008     ATRACE_CALL();
1009     /* Move this back down once verified */
1010     if (mRawDump)
1011         dumpRawSnapshot(super_frame->bufs[0]);
1012 
1013     if (mIsRaw16) {
1014         if (RAW_FORMAT == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
1015             convertMipiToRaw16(super_frame->bufs[0]);
1016         else
1017             convertLegacyToRaw16(super_frame->bufs[0]);
1018     }
1019 
1020     // Make sure of cache coherency since extra CPU processing was done on this buffer
1021     mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
1022 
1023     QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
1024     return;
1025 }
1026 
1027 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
1028 {
1029    QCamera3Stream *stream = getStreamByIndex(0);
1030    char buf[32];
1031    memset(buf, 0, sizeof(buf));
1032    cam_dimension_t dim;
1033    memset(&dim, 0, sizeof(dim));
1034    stream->getFrameDimension(dim);
1035 
1036    cam_frame_len_offset_t offset;
1037    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1038    stream->getFrameOffset(offset);
1039    snprintf(buf, sizeof(buf), "/data/local/tmp/r_%d_%dx%d.raw",
1040             frame->frame_idx, dim.width, dim.height);
1041 
1042    int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
1043    if (file_fd >= 0) {
1044       int written_len = write(file_fd, frame->buffer, offset.frame_len);
1045       ALOGE("%s: written number of bytes %d", __func__, written_len);
1046       close(file_fd);
1047    } else {
1048       ALOGE("%s: failed to open file to dump image", __func__);
1049    }
1050 
1051 }
1052 
1053 void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
1054 {
1055     // Convert image buffer from Opaque raw format to RAW16 format
1056     // 10bit Opaque raw is stored in the format of:
1057     // 0000 - p5 - p4 - p3 - p2 - p1 - p0
1058     // where p0 to p5 are 6 pixels (each is 10 bits) and the most significant
1059     // 4 bits are 0s. Each 64bit word contains 6 pixels.
1060 
1061     QCamera3Stream *stream = getStreamByIndex(0);
1062     cam_dimension_t dim;
1063     memset(&dim, 0, sizeof(dim));
1064     stream->getFrameDimension(dim);
1065 
1066     cam_frame_len_offset_t offset;
1067     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1068     stream->getFrameOffset(offset);
1069 
1070     uint32_t raw16_stride = (dim.width + 15) & ~15;
1071     uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
1072 
1073     // In-place format conversion.
1074     // Raw16 format always occupies more memory than opaque raw10.
1075     // Convert to Raw16 by iterating through all pixels from bottom-right
1076     // to top-left of the image.
1077     // Special notes:
1078     // 1. Cross-platform raw16's stride is 16 pixels.
1079     // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
1080     for (int y = dim.height-1; y >= 0; y--) {
1081         uint64_t* row_start = (uint64_t *)frame->buffer +
1082             y * offset.mp[0].stride / 8;
1083         for (int x = dim.width-1;  x >= 0; x--) {
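            // Each 64-bit word packs 6 pixels, so pixel x sits in word x/6 at
            // bit offset 10*(x%6); mask to 10 bits to recover its value.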
1084             uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
1085             raw16_buffer[y*raw16_stride+x] = raw16_pixel;
1086         }
1087     }
1088 }
1089 
1090 void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
1091 {
1092     // Convert image buffer from mipi10 raw format to RAW16 format
1093     // mipi10 opaque raw is stored in the format of:
1094     // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
1095     // 4 pixels occupy 5 bytes, no padding needed
1096 
1097     QCamera3Stream *stream = getStreamByIndex(0);
1098     cam_dimension_t dim;
1099     memset(&dim, 0, sizeof(dim));
1100     stream->getFrameDimension(dim);
1101 
1102     cam_frame_len_offset_t offset;
1103     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1104     stream->getFrameOffset(offset);
1105 
1106     uint32_t raw16_stride = (dim.width + 15) & ~15;
1107     uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
1108 
1109     // In-place format conversion.
1110     // Raw16 format always occupies more memory than opaque raw10.
1111     // Convert to Raw16 by iterating through all pixels from bottom-right
1112     // to top-left of the image.
1113     // Special notes:
1114     // 1. Cross-platform raw16's stride is 16 pixels.
1115     // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
1116     for (int y = dim.height-1; y >= 0; y--) {
1117         uint8_t* row_start = (uint8_t *)frame->buffer +
1118             y * offset.mp[0].stride;
1119         for (int x = dim.width-1;  x >= 0; x--) {
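            // In each 5-byte MIPI group, bytes 0-3 carry bits 9:2 of pixels 0-3 and
            // byte 4 packs each pixel's two LSBs (pixel n occupies bits 2n+1:2n).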
1120             uint8_t upper_8bit = row_start[5*(x/4)+x%4];
1121             uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> (2*(x%4))) & 0x3);
1122             uint16_t raw16_pixel = (((uint16_t)upper_8bit)<<2 | lower_2bit);
1123             raw16_buffer[y*raw16_stride+x] = raw16_pixel;
1124         }
1125     }
1126 
1127 }
1128 
1129 
1130 /*************************************************************************************/
1131 // RAW Dump Channel related functions
1132 
1133 int QCamera3RawDumpChannel::kMaxBuffers = 3;
1134 /*===========================================================================
1135  * FUNCTION   : QCamera3RawDumpChannel
1136  *
1137  * DESCRIPTION: Constructor for RawDumpChannel
1138  *
1139  * PARAMETERS :
1140  *   @cam_handle    : Handle for Camera
1141  *   @cam_ops       : Function pointer table
1142  *   @rawDumpSize   : Dimensions for the Raw stream
1143  *   @paddinginfo   : Padding information for stream
1144  *   @userData      : Cookie for parent
1145  *   @pp mask       : PP feature mask for this stream
1146  *
1147  * RETURN           : NA
1148  *==========================================================================*/
1149 QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
1150                     mm_camera_ops_t *cam_ops,
1151                     cam_dimension_t rawDumpSize,
1152                     cam_padding_info_t *paddingInfo,
1153                     void *userData,
1154                     uint32_t postprocess_mask) :
1155                         QCamera3Channel(cam_handle, cam_ops, NULL,
1156                                 paddingInfo, postprocess_mask, userData),
1157                         mDim(rawDumpSize),
1158                         mMemory(NULL)
1159 {
1160     char prop[PROPERTY_VALUE_MAX];
1161     property_get("persist.camera.raw.dump", prop, "0");
1162     mRawDump = atoi(prop);
1163 }
1164 
1165 /*===========================================================================
1166  * FUNCTION   : ~QCamera3RawDumpChannel
1167  *
1168  * DESCRIPTION: Destructor for RawDumpChannel
1169  *
1170  * PARAMETERS :
1171  *
1172  * RETURN           : NA
1173  *==========================================================================*/
1174 
1175 QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
1176 {
1177 }
1178 
1179 /*===========================================================================
1180  * FUNCTION   : dumpRawSnapshot
1181  *
1182  * DESCRIPTION: Helper function to dump Raw frames
1183  *
1184  * PARAMETERS :
1185  *  @frame      : stream buf frame to be dumped
1186  *
1187  *  RETURN      : NA
1188  *==========================================================================*/
1189 void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
1190 {
1191     QCamera3Stream *stream = getStreamByIndex(0);
1192     char buf[128];
1193     struct timeval tv;
1194     struct tm *timeinfo;
1195 
1196     cam_dimension_t dim;
1197     memset(&dim, 0, sizeof(dim));
1198     stream->getFrameDimension(dim);
1199 
1200     cam_frame_len_offset_t offset;
1201     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1202     stream->getFrameOffset(offset);
1203 
1204     gettimeofday(&tv, NULL);
1205     timeinfo = localtime(&tv.tv_sec);
1206 
1207     memset(buf, 0, sizeof(buf));
1208     snprintf(buf, sizeof(buf),
1209                  "/data/%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
1210                  timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
1211                  timeinfo->tm_mday, timeinfo->tm_hour,
1212                  timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
1213                  frame->frame_idx, dim.width, dim.height);
1214 
1215     int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
1216     if (file_fd >= 0) {
1217         int written_len = write(file_fd, frame->buffer, offset.frame_len);
1218         CDBG("%s: written number of bytes %d", __func__, written_len);
1219         close(file_fd);
1220     } else {
1221         ALOGE("%s: failed to open file to dump image", __func__);
1222     }
1223 }
1224 
1225 /*===========================================================================
1226  * FUNCTION   : streamCbRoutine
1227  *
1228  * DESCRIPTION: Callback routine invoked for each frame generated for
1229  *              Rawdump channel
1230  *
1231  * PARAMETERS :
1232  *   @super_frame  : stream buf frame generated
1233  *   @stream       : Underlying Stream object cookie
1234  *
1235  * RETURN          : NA
1236  *==========================================================================*/
1237 void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1238                                                 QCamera3Stream *stream)
1239 {
1240     CDBG("%s: E",__func__);
1241     if (super_frame == NULL || super_frame->num_bufs != 1) {
1242         ALOGE("%s: super_frame is not valid", __func__);
1243         return;
1244     }
1245 
1246     if (mRawDump)
1247         dumpRawSnapshot(super_frame->bufs[0]);
1248 
1249     bufDone(super_frame);
1250     free(super_frame);
1251 }
1252 
1253 /*===========================================================================
1254  * FUNCTION   : getStreamBufs
1255  *
1256  * DESCRIPTION: Callback function provided to interface to get buffers.
1257  *
1258  * PARAMETERS :
1259  *   @len       : Length of each buffer to be allocated
1260  *
1261  * RETURN     : NULL on buffer allocation failure
1262  *              QCamera3Memory object on success
1263  *==========================================================================*/
1264 QCamera3Memory* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
1265 {
1266     int rc;
1267     mMemory = new QCamera3HeapMemory();
1268 
1269     if (!mMemory) {
1270         ALOGE("%s: unable to create heap memory", __func__);
1271         return NULL;
1272     }
1273     rc = mMemory->allocate(kMaxBuffers, len, true);
1274     if (rc < 0) {
1275         ALOGE("%s: unable to allocate heap memory", __func__);
1276         delete mMemory;
1277         mMemory = NULL;
1278         return NULL;
1279     }
1280     return mMemory;
1281 }
1282 
1283 /*===========================================================================
1284  * FUNCTION   : putStreamBufs
1285  *
1286  * DESCRIPTION: Callback function provided to interface to return buffers.
1287  *              Although no handles are actually returned, there is an implicit
1288  *              assumption that the interface will no longer use the buffers and
1289  *              the channel can deallocate them if necessary.
1290  *
1291  * PARAMETERS : NA
1292  *
1293  * RETURN     : NA
1294  *==========================================================================*/
1295 void QCamera3RawDumpChannel::putStreamBufs()
1296 {
1297     mMemory->deallocate();
1298     delete mMemory;
1299     mMemory = NULL;
1300 }
1301 
1302 /*===========================================================================
1303  * FUNCTION : request
1304  *
1305  * DESCRIPTION: Request function used as trigger
1306  *
1307  * PARAMETERS :
1308  * @buffer      : will be NULL since this is an internal channel
1309  * @frameNumber : undefined since this is an internal stream
1310  *
1311  * RETURN     : int32_t type of status
1312  *              NO_ERROR  -- success
1313  *              non-zero failure code
1314  *==========================================================================*/
1315 int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
1316                                                 uint32_t /*frameNumber*/)
1317 {
1318     if (!m_bIsActive) {
1319         return QCamera3Channel::start();
1320     }
1321     else
1322         return 0;
1323 }
1324 
1325 /*===========================================================================
1326  * FUNCTION : initialize
1327  *
1328  * DESCRIPTION: Initializes channel params and creates underlying stream
1329  *
1330  * PARAMETERS : NA
1331  *
1332  * RETURN     : int32_t type of status
1333  *              NO_ERROR  -- success
1334  *              non-zero failure code
1335  *==========================================================================*/
1336 int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType)
1337 {
1338     int32_t rc;
1339 
1340     rc = init(NULL, NULL);
1341     if (rc < 0) {
1342         ALOGE("%s: init failed", __func__);
1343         return rc;
1344     }
1345     mIsType = isType;
1346     rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
1347             CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, ROTATE_0, kMaxBuffers,
1348             mPostProcMask, mIsType);
1349     if (rc < 0) {
1350         ALOGE("%s: addStream failed", __func__);
1351     }
1352     return rc;
1353 }
1354 /*************************************************************************************/
1355 
1356 /*===========================================================================
1357  * FUNCTION   : jpegEvtHandle
1358  *
1359  * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events.
1360                 Construct the result payload and call mChannelCB to deliver the
1361                 buffer to the framework.
1362  *
1363  * PARAMETERS :
1364  *   @status    : status of jpeg job
1365  *   @client_hdl: jpeg client handle
1366  *   @jobId     : jpeg job Id
1367  *   @p_output  : ptr to jpeg output result struct
1368  *   @userdata  : user data ptr
1369  *
1370  * RETURN     : none
1371  *==========================================================================*/
1372 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
1373                                               uint32_t /*client_hdl*/,
1374                                               uint32_t jobId,
1375                                               mm_jpeg_output_t *p_output,
1376                                               void *userdata)
1377 {
1378     ATRACE_CALL();
1379     buffer_handle_t *resultBuffer, *jpegBufferHandle;
1380     int32_t resultFrameNumber;
1381     int resultStatus = CAMERA3_BUFFER_STATUS_OK;
1382     camera3_stream_buffer_t result;
1383     camera3_jpeg_blob_t jpegHeader;
1384     char* jpeg_eof = 0;
1385     int maxJpegSize;
1386     int32_t bufIdx;
1387 
1388     QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
1389     if (obj) {
1390         //Construct payload for process_capture_result. Call mChannelCb
1391 
1392         qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
1393 
1394         if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
1395             ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
1396             resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
1397         }
1398 
        if (job == NULL) {
            // Without the jpeg job context there is no output buffer to deliver.
            return;
        }
1399         bufIdx = job->jpeg_settings->out_buf_index;
1400         CDBG("%s: jpeg out_buf_index: %d", __func__, bufIdx);
1401 
1402         //Construct jpeg transient header of type camera3_jpeg_blob_t
1403         //Append at the end of jpeg image of buf_filled_len size
1404 
1405         jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
1406         jpegHeader.jpeg_size = p_output->buf_filled_len;
1407 
1408 
1409         char* jpeg_buf = (char *)p_output->buf_vaddr;
1410 
1411         // Gralloc buffer may have additional padding for 4K page size
1412         // Follow size guidelines based on spec since framework relies
1413         // on that to reach end of buffer and with it the header
1414 
1415         // Same handle as resultBuffer, kept separate for readability
1416         jpegBufferHandle =
1417             (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
1418 
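        // The gralloc handle's width is treated as the maximum jpeg size in bytes;
        // clamp it to the registered buffer size so the blob header stays in bounds.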
1419         maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
1420         if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
1421             maxJpegSize = obj->mMemory.getSize(bufIdx);
1422         }
1423 
1424         jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
1425         memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
1426         obj->mMemory.cleanInvalidateCache(bufIdx);
1427 
1428         // Use the data below to issue the framework callback
1429         resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
1430         resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
1431         int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
1432         if (NO_ERROR != rc) {
1433             ALOGE("%s: Error %d unregistering stream buffer %d",
1434                     __func__, rc, bufIdx);
1435         }
1436 
1437         result.stream = obj->mCamera3Stream;
1438         result.buffer = resultBuffer;
1439         result.status = resultStatus;
1440         result.acquire_fence = -1;
1441         result.release_fence = -1;
1442 
1443         // Release any snapshot buffers before calling
1444         // the user callback. The callback can potentially
1445         // unblock pending requests to snapshot stream.
1446         if (NULL != job) {
1447             int32_t snapshotIdx = -1;
1448             mm_camera_super_buf_t* src_frame = NULL;
1449 
1450             if (job->src_reproc_frame)
1451                 src_frame = job->src_reproc_frame;
1452             else
1453                 src_frame = job->src_frame;
1454 
1455             if (src_frame) {
1456                 if (obj->mStreams[0]->getMyHandle() ==
1457                         src_frame->bufs[0]->stream_id) {
1458                     snapshotIdx = src_frame->bufs[0]->buf_idx;
1459                 } else {
1460                     ALOGE("%s: Snapshot stream id %d and source frame %d don't match!",
1461                             __func__, obj->mStreams[0]->getMyHandle(),
1462                             src_frame->bufs[0]->stream_id);
1463                 }
1464             }
1465             if (0 <= snapshotIdx) {
1466                 Mutex::Autolock lock(obj->mFreeBuffersLock);
1467                 obj->mFreeBufferList.push_back(snapshotIdx);
1468             } else {
1469                 ALOGE("%s: Snapshot buffer not found!", __func__);
1470             }
1471         }
1472 
1473         CDBG("%s: Issue Callback", __func__);
1474         obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
1475 
1476         // release internal data for jpeg job
1477         if (job != NULL) {
1478             if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
1479                 obj->mOfflineMetaMemory.deallocate();
1480                 obj->mOfflineMemory.unregisterBuffers();
1481             }
1482             obj->m_postprocessor.releaseOfflineBuffers();
1483             obj->m_postprocessor.releaseJpegJobData(job);
1484             free(job);
1485         }
1486 
1487         return;
1488         // }
1489     } else {
1490         ALOGE("%s: Null userdata in jpeg callback", __func__);
1491     }
1492 }
1493 
1494 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
1495                     mm_camera_ops_t *cam_ops,
1496                     channel_cb_routine cb_routine,
1497                     cam_padding_info_t *paddingInfo,
1498                     void *userData,
1499                     camera3_stream_t *stream,
1500                     uint32_t postprocess_mask,
1501                     bool is4KVideo,
1502                     QCamera3Channel *metadataChannel) :
1503                         QCamera3Channel(cam_handle, cam_ops, cb_routine,
1504                         paddingInfo, postprocess_mask, userData),
1505                         m_postprocessor(this),
1506                         mCamera3Stream(stream),
1507                         mNumBufsRegistered(CAM_MAX_NUM_BUFS_PER_STREAM),
1508                         mNumSnapshotBufs(0),
1509                         mCurrentBufIndex(-1),
1510                         mPostProcStarted(false),
1511                         mInputBufferConfig(false),
1512                         mYuvMemory(NULL),
1513                         m_pMetaChannel(metadataChannel),
1514                         mMetaFrame(NULL)
1515 {
1516     QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
1517     m_max_pic_dim = hal_obj->calcMaxJpegDim();
1518     mYuvWidth = stream->width;
1519     mYuvHeight = stream->height;
1520     // Use same pixelformat for 4K video case
1521     mStreamFormat = is4KVideo ? VIDEO_FORMAT : SNAPSHOT_FORMAT;
1522     mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
1523     int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, mPostProcMask,
1524             this);
1525     if (rc != 0) {
1526         ALOGE("Init Postprocessor failed");
1527     }
1528 }
1529 
1530 /*===========================================================================
1531  * FUNCTION   : stop
1532  *
1533  * DESCRIPTION: stop pic channel, which will stop all streams within, including
1534  *              the reprocessing channel in postprocessor and YUV stream.
1535  *
1536  * PARAMETERS : none
1537  *
1538  * RETURN     : int32_t type of status
1539  *              NO_ERROR  -- success
1540  *              non-zero failure code
1541  *==========================================================================*/
1542 int32_t QCamera3PicChannel::stop()
1543 {
1544     int32_t rc = NO_ERROR;
1545     if(!m_bIsActive) {
1546         ALOGE("%s: Attempt to stop inactive channel",__func__);
1547         return rc;
1548     }
1549 
1550     m_postprocessor.stop();
1551     mPostProcStarted = false;
1552     rc |= QCamera3Channel::stop();
1553     return rc;
1554 }
1555 
1556 QCamera3PicChannel::~QCamera3PicChannel()
1557 {
1558    stop();
1559 
1560    int32_t rc = m_postprocessor.stop();
1561    if (rc != NO_ERROR) {
1562        ALOGE("%s: Postprocessor stop failed", __func__);
1563    }
1564 
1565    rc = m_postprocessor.deinit();
1566    if (rc != 0) {
1567        ALOGE("De-init Postprocessor failed");
1568    }
1569 
1570    if (0 < mOfflineMetaMemory.getCnt()) {
1571        mOfflineMetaMemory.deallocate();
1572    }
1573    if (0 < mOfflineMemory.getCnt()) {
1574        mOfflineMemory.unregisterBuffers();
1575    }
1576 }
1577 
1578 int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
1579 {
1580     int32_t rc = NO_ERROR;
1581     cam_dimension_t streamDim;
1582     cam_stream_type_t streamType;
1583     cam_format_t streamFormat;
1584     mm_camera_channel_attr_t attr;
1585 
1586     if (NULL == mCamera3Stream) {
1587         ALOGE("%s: Camera stream uninitialized", __func__);
1588         return NO_INIT;
1589     }
1590 
1591     if (1 <= m_numStreams) {
1592         // Only one stream per channel supported in v3 Hal
1593         return NO_ERROR;
1594     }
1595 
1596     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
1597     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
1598     attr.look_back = 1;
1599     attr.post_frame_skip = 1;
1600     attr.water_mark = 1;
1601     attr.max_unmatched_frames = 1;
1602 
1603     rc = init(&attr, NULL);
1604     if (rc < 0) {
1605         ALOGE("%s: init failed", __func__);
1606         return rc;
1607     }
1608     mIsType = isType;
1609 
1610     streamType = mStreamType;
1611     streamFormat = mStreamFormat;
1612     streamDim.width = mYuvWidth;
1613     streamDim.height = mYuvHeight;
1614 
1615     mNumSnapshotBufs = mCamera3Stream->max_buffers;
1616     rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
1617             ROTATE_0, (uint8_t)mCamera3Stream->max_buffers, mPostProcMask,
1618             mIsType);
1619 
1620     Mutex::Autolock lock(mFreeBuffersLock);
1621     mFreeBufferList.clear();
1622     for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
1623         mFreeBufferList.push_back(i);
1624     }
1625 
1626     return rc;
1627 }
1628 
1629 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
1630         uint32_t frameNumber,
1631         camera3_stream_buffer_t *pInputBuffer,
1632         metadata_buffer_t *metadata)
1633 {
1634     ATRACE_CALL();
1635     //FIX ME: Return buffer back in case of failures below.
1636 
1637     int32_t rc = NO_ERROR;
1638     int index;
1639     //extract rotation information
1640 
1641     reprocess_config_t reproc_cfg;
1642     memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
1643     reproc_cfg.padding = mPaddingInfo;
1644     // To ensure a big enough buffer size, set both the height and width
1645     // padding to max(height padding, width padding)
1646     if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
1647        reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
1648     } else {
1649        reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
1650     }
1651     if (NULL != pInputBuffer) {
1652         reproc_cfg.input_stream_dim.width = pInputBuffer->stream->width;
1653         reproc_cfg.input_stream_dim.height = pInputBuffer->stream->height;
1654     } else {
1655         reproc_cfg.input_stream_dim.width = mYuvWidth;
1656         reproc_cfg.input_stream_dim.height = mYuvHeight;
1657         reproc_cfg.src_channel = this;
1658     }
1659     reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
1660     reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
1661     reproc_cfg.stream_type = mStreamType;
1662     reproc_cfg.stream_format = mStreamFormat;
1663     rc = mm_stream_calc_offset_snapshot(mStreamFormat, &reproc_cfg.input_stream_dim,
1664             reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1665     if (rc != 0) {
1666         ALOGE("%s: Snapshot stream plane info calculation failed!", __func__);
1667         return rc;
1668     }
1669     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
1670           int32_t *rotation = (int32_t *)POINTER_OF_PARAM(
1671             CAM_INTF_META_JPEG_ORIENTATION, metadata);
1672           if (*rotation == 0) {
1673              reproc_cfg.rotation = ROTATE_0;
1674           } else if (*rotation == 90) {
1675              reproc_cfg.rotation = ROTATE_90;
1676           } else if (*rotation == 180) {
1677              reproc_cfg.rotation = ROTATE_180;
1678           } else if (*rotation == 270) {
1679              reproc_cfg.rotation = ROTATE_270;
1680           }
1681     }
1682 
1683     // Picture stream has already been started before any request comes in
1684     if (!m_bIsActive) {
1685         ALOGE("%s: Channel not started!!", __func__);
1686         return NO_INIT;
1687     }
1688 
1689     index = mMemory.getMatchBufIndex((void*)buffer);
1690     if(index < 0) {
1691         rc = registerBuffer(buffer, mIsType);
1692         if (NO_ERROR != rc) {
1693             ALOGE("%s: On-the-fly buffer registration failed %d",
1694                     __func__, rc);
1695             return rc;
1696         }
1697 
1698         index = mMemory.getMatchBufIndex((void*)buffer);
1699         if (index < 0) {
1700             ALOGE("%s: Could not find object among registered buffers",__func__);
1701             return DEAD_OBJECT;
1702         }
1703     }
1704     CDBG("%s: buffer index %d, frameNumber: %u", __func__, index, frameNumber);
1705 
1706     rc = mMemory.markFrameNumber(index, frameNumber);
1707 
1708     //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
1709     mCurrentBufIndex = index;
1710 
1711     // Start postprocessor
1712     // This component needs to be re-configured
1713     // once we switch from input(framework) buffer
1714     // reprocess to standard capture!
1715     bool restartNeeded = ((!mInputBufferConfig) != (NULL != pInputBuffer));
1716     if((!mPostProcStarted) || restartNeeded) {
1717         m_postprocessor.start(reproc_cfg, metadata);
1718         mPostProcStarted = true;
1719         mInputBufferConfig = (NULL == pInputBuffer);
1720     }
1721 
1722     // Queue jpeg settings
1723     rc = queueJpegSetting(index, metadata);
1724 
1725     if (pInputBuffer == NULL) {
1726         Mutex::Autolock lock(mFreeBuffersLock);
1727         if (!mFreeBufferList.empty()) {
1728             List<uint32_t>::iterator it = mFreeBufferList.begin();
1729             uint32_t freeBuffer = *it;
1730             mStreams[0]->bufDone(freeBuffer);
1731             mFreeBufferList.erase(it);
1732         } else {
1733             ALOGE("%s: No snapshot buffers available!", __func__);
1734             rc = NOT_ENOUGH_DATA;
1735         }
1736     } else {
1737         if (0 < mOfflineMetaMemory.getCnt()) {
1738             mOfflineMetaMemory.deallocate();
1739         }
1740         if (0 < mOfflineMemory.getCnt()) {
1741             mOfflineMemory.unregisterBuffers();
1742         }
1743 
1744         int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1745         if(input_index < 0) {
1746             rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
1747             if (NO_ERROR != rc) {
1748                 ALOGE("%s: On-the-fly input buffer registration failed %d",
1749                         __func__, rc);
1750                 return rc;
1751             }
1752 
1753             input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1754             if (input_index < 0) {
1755                 ALOGE("%s: Could not find object among registered buffers",__func__);
1756                 return DEAD_OBJECT;
1757             }
1758         }
1759         qcamera_fwk_input_pp_data_t *src_frame = NULL;
1760         src_frame = (qcamera_fwk_input_pp_data_t *)malloc(
1761                 sizeof(qcamera_fwk_input_pp_data_t));
1762         if (src_frame == NULL) {
1763             ALOGE("%s: No memory for src frame", __func__);
1764             return NO_MEMORY;
1765         }
1766         memset(src_frame, 0, sizeof(qcamera_fwk_input_pp_data_t));
1767         src_frame->src_frame = *pInputBuffer;
1768         rc = mOfflineMemory.getBufDef(reproc_cfg.input_stream_plane_info.plane_info,
1769                 src_frame->input_buffer, input_index);
1770         if (rc != 0) {
1771             free(src_frame);
1772             return rc;
1773         }
1774         if (mYUVDump) {
1775            dumpYUV(&src_frame->input_buffer, reproc_cfg.input_stream_dim,
1776                    reproc_cfg.input_stream_plane_info.plane_info, 1);
1777         }
1778         cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
1779         cam_stream_buf_plane_info_t meta_planes;
1780         rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
1781         if (rc != 0) {
1782             ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
1783             free(src_frame);
1784             return rc;
1785         }
1786 
1787         rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
1788         if (NO_ERROR != rc) {
1789             ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
1790             free(src_frame);
1791             return rc;
1792         }
1793         mm_camera_buf_def_t meta_buf;
1794         cam_frame_len_offset_t offset = meta_planes.plane_info;
1795         rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
1796         if (NO_ERROR != rc) {
1797             free(src_frame);
1798             return rc;
1799         }
1800         memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
1801         src_frame->metadata_buffer = meta_buf;
1802         src_frame->reproc_config = reproc_cfg;
1803 
1804         CDBG_HIGH("%s: Post-process started", __func__);
1805         CDBG_HIGH("%s: Issue call to reprocess", __func__);
1806 
1807         m_postprocessor.processData(src_frame);
1808     }
1809     return rc;
1810 }
1811 
1812 
1813 /*===========================================================================
1814  * FUNCTION   : metadataBufDone
1815  *
1816  * DESCRIPTION: Buffer done method for a metadata buffer
1817  *
1818  * PARAMETERS :
1819  * @recvd_frame : received metadata frame
1820  *
1821  * RETURN     : int32_t type of status
1822  *              NO_ERROR  -- success
1823  *              non-zero failure code
1824  *==========================================================================*/
1825 int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
1826 {
1827     int32_t rc = NO_ERROR;
1828     if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
1829         ALOGE("%s: Metadata channel or metadata buffer invalid", __func__);
1830         return BAD_VALUE;
1831     }
1832 
1833     rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
1834 
1835     return rc;
1836 }
1837 
1838 /*===========================================================================
1839  * FUNCTION   : dataNotifyCB
1840  *
1841  * DESCRIPTION: Channel Level callback used for super buffer data notify.
1842  *              This function is registered with mm-camera-interface to handle
1843  *              data notify
1844  *
1845  * PARAMETERS :
1846  *   @recvd_frame   : stream frame received
1847  *   @userdata      : user data ptr
1848  *
1849  * RETURN     : none
1850  *==========================================================================*/
1851 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
1852                                  void *userdata)
1853 {
1854     ATRACE_CALL();
1855     CDBG("%s: E\n", __func__);
1856     QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
1857 
1858     if (channel == NULL) {
1859         ALOGE("%s: invalid channel pointer", __func__);
1860         return;
1861     }
1862 
1863     if(channel->m_numStreams != 1) {
1864         ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
1865         return;
1866     }
1867 
1868 
1869     if(channel->mStreams[0] == NULL) {
1870         ALOGE("%s: Error: Invalid Stream object",__func__);
1871         return;
1872     }
1873 
1874     channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
1875 
1876     CDBG("%s: X\n", __func__);
1877     return;
1878 }
1879 
1880 /*===========================================================================
1881  * FUNCTION   : registerBuffer
1882  *
1883  * DESCRIPTION: register streaming buffer to the channel object
1884  *
1885  * PARAMETERS :
1886  *   @buffer     : buffer to be registered
1887  *
1888  * RETURN     : int32_t type of status
1889  *              NO_ERROR  -- success
1890  *              non-zero failure code
1891  *==========================================================================*/
1892 int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType)
1893 {
1894     int rc = 0;
1895     mIsType = isType;
1896     if ((uint32_t)mMemory.getCnt() > (mNumBufsRegistered - 1)) {
1897         ALOGE("%s: Trying to register more buffers than initially requested",
1898                 __func__);
1899         return BAD_VALUE;
1900     }
1901 
1902     if (0 == m_numStreams) {
1903         rc = initialize(mIsType);
1904         if (rc != NO_ERROR) {
1905             ALOGE("%s: Couldn't initialize camera stream %d",
1906                     __func__, rc);
1907             return rc;
1908         }
1909     }
1910 
1911     rc = mMemory.registerBuffer(buffer, mStreamType);
1912     if (ALREADY_EXISTS == rc) {
1913         return NO_ERROR;
1914     } else if (NO_ERROR != rc) {
1915         ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
1916         return rc;
1917     }
1918 
1919     CDBG("%s: X",__func__);
1920 
1921     return rc;
1922 }
1923 
1924 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1925                             QCamera3Stream *stream)
1926 {
1927     ATRACE_CALL();
1928     //TODO
1929     //Used only for getting YUV. Jpeg callback will be sent back from channel
1930     //directly to HWI. Refer to func jpegEvtHandle
1931 
1932     //Got the yuv callback. Calling yuv callback handler in PostProc
1933     uint8_t frameIndex;
1934     mm_camera_super_buf_t* frame = NULL;
1935     if(!super_frame) {
1936          ALOGE("%s: Invalid Super buffer",__func__);
1937          return;
1938     }
1939 
1940     if(super_frame->num_bufs != 1) {
1941          ALOGE("%s: Multiple streams are not supported",__func__);
1942          return;
1943     }
1944     if(super_frame->bufs[0] == NULL ) {
1945          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
1946                   __func__);
1947          return;
1948     }
1949 
1950     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
1951     CDBG("%s: recvd buf_idx: %u for further processing",
1952         __func__, (uint32_t)frameIndex);
1953     if(frameIndex >= mNumSnapshotBufs) {
1954          ALOGE("%s: Error, Invalid index for buffer",__func__);
1955          if(stream) {
1956              Mutex::Autolock lock(mFreeBuffersLock);
1957              mFreeBufferList.push_back(frameIndex);
1958              stream->bufDone(frameIndex);
1959          }
1960          return;
1961     }
1962 
1963     frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1964     if (frame == NULL) {
1965        ALOGE("%s: Error allocating memory to save received_frame structure.",
1966                                                                     __func__);
1967        if(stream) {
1968            Mutex::Autolock lock(mFreeBuffersLock);
1969            mFreeBufferList.push_back(frameIndex);
1970            stream->bufDone(frameIndex);
1971        }
1972        return;
1973     }
1974     *frame = *super_frame;
1975 
1976     if(mYUVDump) {
1977         cam_dimension_t dim;
1978         memset(&dim, 0, sizeof(dim));
1979         stream->getFrameDimension(dim);
1980         cam_frame_len_offset_t offset;
1981         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1982         stream->getFrameOffset(offset);
1983         dumpYUV(frame->bufs[0], dim, offset, 1);
1984     }
1985 
1986     m_postprocessor.processData(frame);
1987     free(super_frame);
1988     return;
1989 }
1990 
1991 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
1992 {
1993     int rc = 0;
1994 
1995     mYuvMemory = new QCamera3HeapMemory();
1996     if (!mYuvMemory) {
1997         ALOGE("%s: unable to create YUV snapshot memory", __func__);
1998         return NULL;
1999     }
2000 
2001     // Queue all YUV buffers up front (mQueueAll = true)
2002     rc = mYuvMemory->allocate(mCamera3Stream->max_buffers, len, false);
2003     if (rc < 0) {
2004         ALOGE("%s: unable to allocate YUV snapshot memory", __func__);
2005         delete mYuvMemory;
2006         mYuvMemory = NULL;
2007         return NULL;
2008     }
2009     return mYuvMemory;
2010 }
2011 
2012 void QCamera3PicChannel::putStreamBufs()
2013 {
2014     mMemory.unregisterBuffers();
2015 
2016     mYuvMemory->deallocate();
2017     delete mYuvMemory;
2018     mYuvMemory = NULL;
2019 }
2020 
2021 int32_t QCamera3PicChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
2022 {
2023     return m_postprocessor.processPPMetadata(metadata);
2024 }
2025 
2026 int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata)
2027 {
2028     jpeg_settings_t *settings =
2029             (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
2030 
2031     if (!settings) {
2032         ALOGE("%s: out of memory allocating jpeg_settings", __func__);
2033         return -ENOMEM;
2034     }
2035 
2036     memset(settings, 0, sizeof(jpeg_settings_t));
2037 
2038     settings->out_buf_index = index;
2039 
2040     settings->jpeg_orientation = 0;
2041     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
2042         int32_t *orientation = (int32_t *)POINTER_OF_PARAM(
2043                 CAM_INTF_META_JPEG_ORIENTATION, metadata);
2044         settings->jpeg_orientation = *orientation;
2045     }
2046 
2047     settings->jpeg_quality = 85;
2048     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
2049         uint8_t *quality = (uint8_t *)POINTER_OF_PARAM(
2050                 CAM_INTF_META_JPEG_QUALITY, metadata);
2051         settings->jpeg_quality = *quality;
2052     }
2053 
2054     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
2055         uint8_t *quality = (uint8_t *)POINTER_OF_PARAM(
2056                 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2057         settings->jpeg_thumb_quality = *quality;
2058     }
2059 
2060     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
2061         cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF_PARAM(
2062                 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2063         settings->thumbnail_size = *dimension;
2064     }
2065 
2066     settings->gps_timestamp_valid = 0;
2067     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
2068         int64_t *timestamp = (int64_t *)POINTER_OF_PARAM(
2069                 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2070         settings->gps_timestamp = *timestamp;
2071         settings->gps_timestamp_valid = 1;
2072     }
2073 
2074     settings->gps_coordinates_valid = 0;
2075     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
2076         double *coordinates = (double *)POINTER_OF_PARAM(
2077                 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2078         memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
2079         settings->gps_coordinates_valid = 1;
2080     }
2081 
2082     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
2083         char *proc_methods = (char *)POINTER_OF_PARAM(
2084                 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2085         memset(settings->gps_processing_method, 0,
2086                 sizeof(settings->gps_processing_method));
2087         strncpy(settings->gps_processing_method, proc_methods,
2088                 sizeof(settings->gps_processing_method) - 1);
2089     }
2090 
2091     return m_postprocessor.processJpegSettingData(settings);
2092 }
2093 
2094 /*===========================================================================
2095  * FUNCTION   : getRational
2096  *
2097  * DESCRIPTION: compose rational struct
2098  *
2099  * PARAMETERS :
2100  *   @rat     : ptr to struct to store rational info
2101  *   @num     : numerator of the rational
2102  *   @denom   : denominator of the rational
2103  *
2104  * RETURN     : int32_t type of status
2105  *              NO_ERROR  -- success
2106  *              non-zero failure code
2107  *==========================================================================*/
2108 int32_t getRational(rat_t *rat, int num, int denom)
2109 {
2110     if (NULL == rat) {
2111         ALOGE("%s: NULL rat input", __func__);
2112         return BAD_VALUE;
2113     }
2114     rat->num = num;
2115     rat->denom = denom;
2116     return NO_ERROR;
2117 }
2118 
2119 /*===========================================================================
2120  * FUNCTION   : parseGPSCoordinate
2121  *
2122  * DESCRIPTION: parse GPS coordinate string
2123  *
2124  * PARAMETERS :
2125  *   @coord_str : [input] coordinate string
2126  *   @coord     : [output]  ptr to struct to store coordinate
2127  *
2128  * RETURN     : int32_t type of status
2129  *              NO_ERROR  -- success
2130  *              non-zero failure code
2131  *==========================================================================*/
2132 int parseGPSCoordinate(const char *coord_str, rat_t* coord)
2133 {
2134     if(coord == NULL) {
2135         ALOGE("%s: error, invalid argument coord == NULL", __func__);
2136         return BAD_VALUE;
2137     }
2138     float degF = atof(coord_str);
2139     if (degF < 0) {
2140         degF = -degF;
2141     }
2142     float minF = (degF - (int) degF) * 60;
2143     float secF = (minF - (int) minF) * 60;
2144 
2145     getRational(&coord[0], (int)degF, 1);
2146     getRational(&coord[1], (int)minF, 1);
2147     getRational(&coord[2], (int)(secF * 10000), 10000);
2148     return NO_ERROR;
2149 }
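/*
 * Worked example (illustrative input, ignoring float rounding):
 * parseGPSCoordinate("37.774900", coord) computes
 *   degF = 37.7749 -> coord[0] = 37/1         (degrees)
 *   minF = 46.494  -> coord[1] = 46/1         (minutes)
 *   secF = 29.64   -> coord[2] = 296400/10000 (seconds, 4 decimal places)
 * The sign is discarded here; callers record it separately through the
 * latitude/longitude reference character.
 */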
2150 
2151 /*===========================================================================
2152  * FUNCTION   : getExifDateTime
2153  *
2154  * DESCRIPTION: query exif date time
2155  *
2156  * PARAMETERS :
2157  *   @dateTime   : string to store exif date time
2158  *   @subsecTime : string to store exif subsec time
2159  *   @count      : length of the dateTime string
2160  *   @subsecCount: length of the subsecTime string
2161  *
2162  * RETURN     : int32_t type of status
2163  *              NO_ERROR  -- success
2164  *              non-zero failure code
2165  *==========================================================================*/
2166 int32_t getExifDateTime(char *dateTime, char *subsecTime,
2167         uint32_t &count, uint32_t &subsecCount)
2168 {
2169     //get time and date from system
2170     struct timeval tv;
2171     struct tm *timeinfo;
2172 
2173     gettimeofday(&tv, NULL);
2174     timeinfo = localtime(&tv.tv_sec);
2175     //Write datetime according to EXIF Spec
2176     //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
2177     snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
2178              timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
2179              timeinfo->tm_mday, timeinfo->tm_hour,
2180              timeinfo->tm_min, timeinfo->tm_sec);
2181     count = 20;
2182 
2183     //Write subsec according to EXIF Spec
2184     snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
2185     subsecCount = 7;
2186     return NO_ERROR;
2187 }
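/*
 * Example (illustrative): for a local time of 2015-03-27 14:05:09 with
 * tv_usec = 42137, the outputs are dateTime = "2015:03:27 14:05:09"
 * (count = 20, including the terminating NUL) and subsecTime = "042137"
 * (subsecCount = 7).
 */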
2188 
2189 /*===========================================================================
2190  * FUNCTION   : getExifFocalLength
2191  *
2192  * DESCRIPTION: get exif focal length
2193  *
2194  * PARAMETERS :
2195  *   @focalLength : ptr to rational struct to store focal length
2196  *
2197  * RETURN     : int32_t type of status
2198  *              NO_ERROR  -- success
2199  *              non-zero failure code
2200  *==========================================================================*/
2201 int32_t getExifFocalLength(rat_t *focalLength, float value)
2202 {
2203     int focalLengthValue =
2204         (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
2205     return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
2206 }
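/*
 * Example (illustrative): a focal length of 4.67 mm becomes the EXIF rational
 * 467/100, i.e. (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION) over
 * FOCAL_LENGTH_DECIMAL_PRECISION.
 */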
2207 
2208 /*===========================================================================
2209  * FUNCTION   : getExifExpTimeInfo
2210  *
2211  * DESCRIPTION: get exif exposure time information
2212  *
2213  * PARAMETERS :
2214  *   @expoTimeInfo : ptr to rational struct to store exposure time
2215  * RETURN     : int32_t type of status
2216  *              NO_ERROR  -- success
2217  *              non-zero failure code
2218  *==========================================================================*/
2219 int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
2220 {
2221 
2222     int cal_exposureTime;
2223     if (value != 0)
2224         cal_exposureTime = value;
2225     else
2226         cal_exposureTime = 60;
2227 
2228     return getRational(expoTimeInfo, 1, cal_exposureTime);
2229 }
2230 
2231 /*===========================================================================
2232  * FUNCTION   : getExifGpsProcessingMethod
2233  *
2234  * DESCRIPTION: get GPS processing method
2235  *
2236  * PARAMETERS :
2237  *   @gpsProcessingMethod : string to store GPS process method
2238  *   @count               : length of the string
2239  *
2240  * RETURN     : int32_t type of status
2241  *              NO_ERROR  -- success
2242  *              non-zero failure code
2243  *==========================================================================*/
2244 int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
2245                                    uint32_t &count, char* value)
2246 {
2247     if(value != NULL) {
2248         memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
2249         count = EXIF_ASCII_PREFIX_SIZE;
2250         strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
2251         count += strlen(value);
2252         gpsProcessingMethod[count++] = '\0'; // count includes the terminating NUL char
2253         return NO_ERROR;
2254     } else {
2255         return BAD_VALUE;
2256     }
2257 }
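/*
 * Example layout (illustrative): for value = "NETWORK" the output buffer holds
 *   "ASCII\0\0\0" + "NETWORK" + '\0'
 * i.e. the 8-byte EXIF ASCII prefix followed by the method string, and
 * count = EXIF_ASCII_PREFIX_SIZE + strlen(value) + 1 = 8 + 7 + 1 = 16.
 */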
2258 
2259 /*===========================================================================
2260  * FUNCTION   : getExifLatitude
2261  *
2262  * DESCRIPTION: get exif latitude
2263  *
2264  * PARAMETERS :
2265  *   @latitude : ptr to rational struct to store latitude info
2266  *   @latRef   : character to indicate latitude reference
2267  *
2268  * RETURN     : int32_t type of status
2269  *              NO_ERROR  -- success
2270  *              non-zero failure code
2271  *==========================================================================*/
2272 int32_t getExifLatitude(rat_t *latitude,
2273                                            char *latRef, double value)
2274 {
2275     char str[30];
2276     snprintf(str, sizeof(str), "%f", value);
2277     if(str != NULL) {
2278         parseGPSCoordinate(str, latitude);
2279 
2280         //set Latitude Ref
2281         float latitudeValue = strtof(str, 0);
2282         if(latitudeValue < 0.0f) {
2283             latRef[0] = 'S';
2284         } else {
2285             latRef[0] = 'N';
2286         }
2287         latRef[1] = '\0';
2288         return NO_ERROR;
2289     }else{
2290         return BAD_VALUE;
2291     }
2292 }
2293 
2294 /*===========================================================================
2295  * FUNCTION   : getExifLongitude
2296  *
2297  * DESCRIPTION: get exif longitude
2298  *
2299  * PARAMETERS :
2300  *   @longitude : ptr to rational struct to store longitude info
2301  *   @lonRef    : character to indicate longitude reference
2302  *
2303  * RETURN     : int32_t type of status
2304  *              NO_ERROR  -- success
2305  *              non-zero failure code
2306  *==========================================================================*/
2307 int32_t getExifLongitude(rat_t *longitude,
2308                                             char *lonRef, double value)
2309 {
2310     char str[30];
2311     snprintf(str, sizeof(str), "%f", value);
2312     if(str != NULL) {
2313         parseGPSCoordinate(str, longitude);
2314 
2315         //set Longitude Ref
2316         float longitudeValue = strtof(str, 0);
2317         if(longitudeValue < 0.0f) {
2318             lonRef[0] = 'W';
2319         } else {
2320             lonRef[0] = 'E';
2321         }
2322         lonRef[1] = '\0';
2323         return NO_ERROR;
2324     }else{
2325         return BAD_VALUE;
2326     }
2327 }
2328 
2329 /*===========================================================================
2330  * FUNCTION   : getExifAltitude
2331  *
2332  * DESCRIPTION: get exif altitude
2333  *
2334  * PARAMETERS :
2335  *   @altitude : ptr to rational struct to store altitude info
2336  *   @altRef   : character to indicate altitude reference
2337  *
2338  * RETURN     : int32_t type of status
2339  *              NO_ERROR  -- success
2340  *              non-zero failure code
2341  *==========================================================================*/
2342 int32_t getExifAltitude(rat_t *altitude,
2343                                            char *altRef, double value)
2344 {
2345     char str[30];
2346     snprintf(str, sizeof(str), "%f", value);
2347     if(str != NULL) {
2348         double value = atof(str);
2349         *altRef = 0;
2350         if(value < 0){
2351             *altRef = 1;
2352             value = -value;
2353         }
2354         return getRational(altitude, value*1000, 1000);
2355     }else{
2356         return BAD_VALUE;
2357     }
2358 }
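/*
 * Example (illustrative): value = -12.5 sets *altRef = 1 (below sea level in
 * EXIF GPSAltitudeRef terms) and altitude = 12500/1000, while a non-negative
 * value keeps *altRef = 0 (above sea level).
 */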
2359 
2360 /*===========================================================================
2361  * FUNCTION   : getExifGpsDateTimeStamp
2362  *
2363  * DESCRIPTION: get exif GPS date time stamp
2364  *
2365  * PARAMETERS :
2366  *   @gpsDateStamp : GPS date time stamp string
2367  *   @bufLen       : length of the string
2368  *   @gpsTimeStamp : ptr to rational struct to store time stamp info
2369  *
2370  * RETURN     : int32_t type of status
2371  *              NO_ERROR  -- success
2372  *              non-zero failure code
2373  *==========================================================================*/
2374 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
2375                                            uint32_t bufLen,
2376                                            rat_t *gpsTimeStamp, int64_t value)
2377 {
2378     char str[30];
2379     snprintf(str, sizeof(str), "%lld", value);
2380     if(str != NULL) {
2381         time_t unixTime = (time_t)atol(str);
2382         struct tm *UTCTimestamp = gmtime(&unixTime);
2383 
2384         strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
2385 
2386         getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
2387         getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
2388         getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
2389 
2390         return NO_ERROR;
2391     } else {
2392         return BAD_VALUE;
2393     }
2394 }
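/*
 * Example (illustrative): a GPS timestamp of 0 (the Unix epoch) yields
 * gpsDateStamp = "1970:01:01" and gpsTimeStamp = { 0/1, 0/1, 0/1 } for the
 * UTC hours, minutes and seconds.
 */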
2395 
2396 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
2397                              cam_rational_type_t step)
2398 {
2399     exposure_val->num = exposure_comp * step.numerator;
2400     exposure_val->denom = step.denominator;
2401     return 0;
2402 }
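/*
 * Example (illustrative): an exposure compensation index of +2 with an EV step
 * of 1/3 is stored as the signed rational 2/3 EV; an index of -4 with a step
 * of 1/6 is stored as -4/6 EV.
 */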
2403 /*===========================================================================
2404  * FUNCTION   : getExifData
2405  *
2406  * DESCRIPTION: get exif data to be passed into jpeg encoding
2407  *
2408  * PARAMETERS : none
2409  *
2410  * RETURN     : exif data from user setting and GPS
2411  *==========================================================================*/
2412 QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
2413         jpeg_settings_t *jpeg_settings)
2414 {
2415     QCamera3Exif *exif = new QCamera3Exif();
2416     if (exif == NULL) {
2417         ALOGE("%s: No memory for QCamera3Exif", __func__);
2418         return NULL;
2419     }
2420 
2421     int32_t rc = NO_ERROR;
2422     uint32_t count = 0;
2423 
2424     // add exif entries
2425     {
2426         char dateTime[20];
2427         char subsecTime[7];
2428         uint32_t subsecCount;
2429         memset(dateTime, 0, sizeof(dateTime));
2430         memset(subsecTime, 0, sizeof(subsecTime));
2431         count = 20;
2432         subsecCount = 7;
2433         rc = getExifDateTime(dateTime, subsecTime, count, subsecCount);
2434         if(rc == NO_ERROR) {
2435             exif->addEntry(EXIFTAGID_DATE_TIME,
2436                     EXIF_ASCII,
2437                     count,
2438                     (void *)dateTime);
2439             exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
2440                     EXIF_ASCII,
2441                     count,
2442                     (void *)dateTime);
2443             exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
2444                     EXIF_ASCII,
2445                     count,
2446                     (void *)dateTime);
2447             exif->addEntry(EXIFTAGID_SUBSEC_TIME,
2448                     EXIF_ASCII,
2449                     subsecCount,
2450                     (void *)subsecTime);
2451             exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
2452                     EXIF_ASCII,
2453                     subsecCount,
2454                     (void *)subsecTime);
2455             exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
2456                     EXIF_ASCII,
2457                     subsecCount,
2458                     (void *)subsecTime);
2459         } else {
2460             ALOGE("%s: getExifDateTime failed", __func__);
2461         }
2462     }
2463 
2464     if (IS_PARAM_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) {
2465         float focal_length = *(float *)POINTER_OF_PARAM(
2466                 CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2467         rat_t focalLength;
2468         rc = getExifFocalLength(&focalLength, focal_length);
2469         if (rc == NO_ERROR) {
2470             exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
2471                     EXIF_RATIONAL,
2472                     1,
2473                     (void *)&(focalLength));
2474         } else {
2475             ALOGE("%s: getExifFocalLength failed", __func__);
2476         }
2477     }
2478 
2479     if (IS_PARAM_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) {
2480         int16_t isoSpeed = *(int32_t *)POINTER_OF_PARAM(
2481                 CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2482         exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
2483                    EXIF_SHORT,
2484                    1,
2485                    (void *)&(isoSpeed));
2486     }
2487 
2488     if (IS_PARAM_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) {
2489         int64_t sensor_exposure_time = *(int64_t *)POINTER_OF_PARAM(
2490                 CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2491         rat_t sensorExpTime;
2492         rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time);
2493         if (rc == NO_ERROR){
2494             exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
2495                     EXIF_RATIONAL,
2496                     1,
2497                     (void *)&(sensorExpTime));
2498         } else {
2499             ALOGE("%s: getExifExpTimeInfo failed", __func__);
2500         }
2501     }
2502 
2503     if (strlen(jpeg_settings->gps_processing_method) > 0) {
2504         char gpsProcessingMethod[
2505                     EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
2506         count = 0;
2507         rc = getExifGpsProcessingMethod(gpsProcessingMethod,
2508                 count, jpeg_settings->gps_processing_method);
2509         if(rc == NO_ERROR) {
2510             exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
2511                     EXIF_ASCII,
2512                     count,
2513                     (void *)gpsProcessingMethod);
2514         } else {
2515             ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
2516         }
2517     }
2518 
2519     if (jpeg_settings->gps_coordinates_valid) {
2520 
2521         //latitude
2522         rat_t latitude[3];
2523         char latRef[2];
2524         rc = getExifLatitude(latitude, latRef,
2525                 jpeg_settings->gps_coordinates[0]);
2526         if(rc == NO_ERROR) {
2527             exif->addEntry(EXIFTAGID_GPS_LATITUDE,
2528                            EXIF_RATIONAL,
2529                            3,
2530                            (void *)latitude);
2531             exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
2532                            EXIF_ASCII,
2533                            2,
2534                            (void *)latRef);
2535         } else {
2536             ALOGE("%s: getExifLatitude failed", __func__);
2537         }
2538 
2539         //longitude
2540         rat_t longitude[3];
2541         char lonRef[2];
2542         rc = getExifLongitude(longitude, lonRef,
2543                 jpeg_settings->gps_coordinates[1]);
2544         if(rc == NO_ERROR) {
2545             exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
2546                            EXIF_RATIONAL,
2547                            3,
2548                            (void *)longitude);
2549 
2550             exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
2551                            EXIF_ASCII,
2552                            2,
2553                            (void *)lonRef);
2554         } else {
2555             ALOGE("%s: getExifLongitude failed", __func__);
2556         }
2557 
2558         //altitude
2559         rat_t altitude;
2560         char altRef;
2561         rc = getExifAltitude(&altitude, &altRef,
2562                 jpeg_settings->gps_coordinates[2]);
2563         if(rc == NO_ERROR) {
2564             exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
2565                            EXIF_RATIONAL,
2566                            1,
2567                            (void *)&(altitude));
2568 
2569             exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
2570                            EXIF_BYTE,
2571                            1,
2572                            (void *)&altRef);
2573         } else {
2574             ALOGE("%s: getExifAltitude failed", __func__);
2575         }
2576     }
2577 
2578     if (jpeg_settings->gps_timestamp_valid) {
2579 
2580         char gpsDateStamp[20];
2581         rat_t gpsTimeStamp[3];
2582         rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
2583                 jpeg_settings->gps_timestamp);
2584         if(rc == NO_ERROR) {
2585             exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
2586                            EXIF_ASCII,
2587                            strlen(gpsDateStamp) + 1,
2588                            (void *)gpsDateStamp);
2589 
2590             exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
2591                            EXIF_RATIONAL,
2592                            3,
2593                            (void *)gpsTimeStamp);
2594         } else {
2595             ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
2596         }
2597     }
2598 
2599     if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) &&
2600             IS_PARAM_AVAILABLE(CAM_INTF_PARM_EV_STEP, metadata)) {
2601         int32_t exposure_comp = *(int32_t *)POINTER_OF_PARAM(
2602                 CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
2603         cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF_PARAM(
2604                 CAM_INTF_PARM_EV_STEP, metadata);
2605         srat_t exposure_val;
2606         rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step);
2607         if(rc == NO_ERROR) {
2608             exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
2609                        EXIF_SRATIONAL,
2610                        1,
2611                        (void *)(&exposure_val));
2612         } else {
2613             ALOGE("%s: getExifExposureValue failed ", __func__);
2614         }
2615     }
2616 
2617     char value[PROPERTY_VALUE_MAX];
2618     if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
2619         exif->addEntry(EXIFTAGID_MAKE,
2620                        EXIF_ASCII,
2621                        strlen(value) + 1,
2622                        (void *)value);
2623     } else {
2624         ALOGE("%s: getExifMaker failed", __func__);
2625     }
2626 
2627     if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
2628         exif->addEntry(EXIFTAGID_MODEL,
2629                        EXIF_ASCII,
2630                        strlen(value) + 1,
2631                        (void *)value);
2632     } else {
2633         ALOGE("%s: getExifModel failed", __func__);
2634     }
2635 
2636     return exif;
2637 }
2638 
2639 /* Up to MAX_INFLIGHT_REQUESTS requests can be queued up at a time. Hence
2640  allocate the same number of picture channel buffers */
2641 int QCamera3PicChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
2642 
2643 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
2644 {
2645    mYuvWidth = width;
2646    mYuvHeight = height;
2647 }
2648 
2649 /*===========================================================================
2650  * FUNCTION   : QCamera3ReprocessChannel
2651  *
2652  * DESCRIPTION: constructor of QCamera3ReprocessChannel
2653  *
2654  * PARAMETERS :
2655  *   @cam_handle : camera handle
2656  *   @cam_ops    : ptr to camera ops table
2657  *   @pp_mask    : post-process feature mask
2658  *
2659  * RETURN     : none
2660  *==========================================================================*/
2661 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
2662                                                  mm_camera_ops_t *cam_ops,
2663                                                  channel_cb_routine cb_routine,
2664                                                  cam_padding_info_t *paddingInfo,
2665                                                  uint32_t postprocess_mask,
2666                                                  void *userData, void *ch_hdl) :
2667     QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, postprocess_mask,
2668                     userData),
2669     picChHandle(ch_hdl),
2670     mOfflineBuffersIndex(-1),
2671     m_pSrcChannel(NULL),
2672     m_pMetaChannel(NULL),
2673     mMemory(NULL)
2674 {
2675     memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
2676     mOfflineMetaIndex = MAX_INFLIGHT_REQUESTS -1;
2677 }
2678 
2679 
2680 /*===========================================================================
2681  * FUNCTION   : initialize
2682  *
2683  * DESCRIPTION: initialize the reprocess channel (channel attributes and
2684  *              registration with mm-camera-interface)
2685  *
2686  * PARAMETERS :
2687  *   @isType     : image stabilization type for the stream
2688  *
2689  * RETURN     : int32_t type of status
2690  *              NO_ERROR  -- success, non-zero failure code
2691  *==========================================================================*/
2692 int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
2693 {
2694     int32_t rc = NO_ERROR;
2695     mm_camera_channel_attr_t attr;
2696 
2697     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
2698     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
2699     attr.max_unmatched_frames = 1;
2700 
2701     rc = init(&attr, NULL);
2702     if (rc < 0) {
2703         ALOGE("%s: init failed", __func__);
2704     }
2705     mIsType = isType;
2706     return rc;
2707 }
2708 
2709 
2710 /*===========================================================================
2711  * FUNCTION   : streamCbRoutine
2712  *
2713  * DESCRIPTION: stream data callback; hands the post-processed frame over
2714  *              to the pic channel's postprocessor for jpeg encoding
2715  *
2716  * PARAMETERS :
2717  *   @super_frame : super buffer received from mm-camera-interface
2718  *   @stream      : stream object this frame belongs to
2719  *
2720  * RETURN     : none
2721  *==========================================================================*/
2722 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2723                                   QCamera3Stream *stream)
2724 {
2725     //Got the pproc data callback. Now send to jpeg encoding
2726     uint8_t frameIndex;
2727     mm_camera_super_buf_t* frame = NULL;
2728     QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;
2729 
2730     if(!super_frame) {
2731          ALOGE("%s: Invalid Super buffer",__func__);
2732          return;
2733     }
2734 
2735     if(super_frame->num_bufs != 1) {
2736          ALOGE("%s: Multiple streams are not supported",__func__);
2737          return;
2738     }
2739     if(super_frame->bufs[0] == NULL ) {
2740          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
2741                   __func__);
2742          return;
2743     }
2744 
2745     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
2746     frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
2747     if (frame == NULL) {
2748        ALOGE("%s: Error allocating memory to save received_frame structure.",
2749                                                                     __func__);
2750        if(stream) {
2751            stream->bufDone(frameIndex);
2752        }
2753        return;
2754     }
2755     CDBG("%s: bufIndex: %u recvd from post proc",
2756         __func__, (uint32_t)frameIndex);
2757     *frame = *super_frame;
2758     if(mYUVDump) {
2759         cam_dimension_t dim;
2760         memset(&dim, 0, sizeof(dim));
2761         stream->getFrameDimension(dim);
2762         cam_frame_len_offset_t offset;
2763         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2764         stream->getFrameOffset(offset);
2765         dumpYUV(frame->bufs[0], dim, offset, 2);
2766     }
2767     obj->m_postprocessor.processPPData(frame);
2768     free(super_frame);
2769     return;
2770 }
2771 
2772 /*===========================================================================
2773  * FUNCTION   : QCamera3ReprocessChannel
2774  *
2775  * DESCRIPTION: default constructor of QCamera3ReprocessChannel
2776  *
2777  * PARAMETERS : none
2778  *
2779  * RETURN     : none
2780  *==========================================================================*/
2781 QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
2782     m_pSrcChannel(NULL),
2783     m_pMetaChannel(NULL)
2784 {
2785 }
2786 
2787 /*===========================================================================
2788  * FUNCTION   : getStreamBufs
2789  *
2790  * DESCRIPTION: allocate output buffers of the reprocess channel
2791  *
2792  * PARAMETERS :
 *   @len        : size of each output buffer
2793  *
2794  * RETURN     : QCamera3Memory *
2795  *==========================================================================*/
2796 QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
2797 {
2798    int rc = 0;
2799 
2800     mMemory = new QCamera3HeapMemory();
2801     if (!mMemory) {
2802         ALOGE("%s: unable to create reproc memory", __func__);
2803         return NULL;
2804     }
2805 
2806     // Queue all YUV buffers up front (mQueueAll = true)
2807     /* Up to MAX_INFLIGHT_REQUESTS requests can be queued up at a time.
2808      * Hence allocate the same number of reprocess channel output buffers */
2809     rc = mMemory->allocate(MAX_INFLIGHT_REQUESTS, len, true);
2810     if (rc < 0) {
2811         ALOGE("%s: unable to allocate reproc memory", __func__);
2812         delete mMemory;
2813         mMemory = NULL;
2814         return NULL;
2815     }
2816     return mMemory;
2817 }
2818 
2819 /*===========================================================================
2820  * FUNCTION   : putStreamBufs
2821  *
2822  * DESCRIPTION: release the output buffers of the reprocess channel
2823  *
2824  * PARAMETERS : none
2825  *
2826  * RETURN     : none
2827  *==========================================================================*/
2828 void QCamera3ReprocessChannel::putStreamBufs()
2829 {
2830     mMemory->deallocate();
2831     delete mMemory;
2832     mMemory = NULL;
2833 }
2834 
2835 /*===========================================================================
2836  * FUNCTION   : ~QCamera3ReprocessChannel
2837  *
2838  * DESCRIPTION: destructor of QCamera3ReprocessChannel
2839  *
2840  * PARAMETERS : none
2841  *
2842  * RETURN     : none
2843  *==========================================================================*/
2844 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
2845 {
2846 }
2847 
2848 /*===========================================================================
2849  * FUNCTION   : getStreamBySrcHandle
2850  *
2851  * DESCRIPTION: find reprocess stream by its source stream handle
2852  *
2853  * PARAMETERS :
2854  *   @srcHandle : source stream handle
2855  *
2856  * RETURN     : ptr to reprocess stream if found. NULL if not found
2857  *==========================================================================*/
2858 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
2859 {
2860     QCamera3Stream *pStream = NULL;
2861 
2862     for (int i = 0; i < m_numStreams; i++) {
2863         if (mSrcStreamHandles[i] == srcHandle) {
2864             pStream = mStreams[i];
2865             break;
2866         }
2867     }
2868     return pStream;
2869 }
2870 
2871 /*===========================================================================
2872  * FUNCTION   : getSrcStreamBySrcHandle
2873  *
2874  * DESCRIPTION: find source stream by source stream handle
2875  *
2876  * PARAMETERS :
2877  *   @srcHandle : source stream handle
2878  *
2879  * RETURN     : ptr to source stream if found. NULL if not found
2880  *==========================================================================*/
2881 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
2882 {
2883     QCamera3Stream *pStream = NULL;
2884 
2885     if (NULL == m_pSrcChannel) {
2886         return NULL;
2887     }
2888 
2889     for (int i = 0; i < m_numStreams; i++) {
2890         if (mSrcStreamHandles[i] == srcHandle) {
2891             pStream = m_pSrcChannel->getStreamByIndex(i);
2892             break;
2893         }
2894     }
2895     return pStream;
2896 }
2897 
2898 /*===========================================================================
2899  * FUNCTION   : stop
2900  *
2901  * DESCRIPTION: stop channel
2902  *
2903  * PARAMETERS : none
2904  *
2905  * RETURN     : int32_t type of status
2906  *              NO_ERROR  -- success
2907  *              non-zero failure code
2908  *==========================================================================*/
2909 int32_t QCamera3ReprocessChannel::stop()
2910 {
2911     unmapOfflineBuffers(true);
2912 
2913     return QCamera3Channel::stop();
2914 }

/*===========================================================================
 * FUNCTION   : unmapOfflineBuffers
 *
 * DESCRIPTION: Unmaps offline buffers
 *
 * PARAMETERS :
 *   @all : unmap all offline buffers if true, only the oldest one otherwise
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
{
    int rc = NO_ERROR;
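    // When 'all' is false only the entry at the head of each list (the
    // oldest mapping) is unmapped and removed; when 'all' is true every
    // mapped offline input and meta buffer is unmapped and the lists are
    // cleared.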
    if (!mOfflineBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
        for (; it != mOfflineBuffers.end(); it++) {
            stream = (*it).stream;
            if (NULL != stream) {
                rc = stream->unmapBuf((*it).type,
                                      (*it).index,
                                      -1);
                if (NO_ERROR != rc) {
                    ALOGE("%s: Error during offline buffer unmap %d",
                          __func__, rc);
                }
                CDBG("%s: Unmapped buffer with index %d", __func__, (*it).index);
            }
            if (!all) {
                mOfflineBuffers.erase(it);
                break;
            }
        }
        if (all) {
            mOfflineBuffers.clear();
        }
    }

    if (!mOfflineMetaBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
        for (; it != mOfflineMetaBuffers.end(); it++) {
            stream = (*it).stream;
            if (NULL != stream) {
                rc = stream->unmapBuf((*it).type,
                                      (*it).index,
                                      -1);
                if (NO_ERROR != rc) {
                    ALOGE("%s: Error during offline buffer unmap %d",
                          __func__, rc);
                }
                CDBG("%s: Unmapped meta buffer with index %d", __func__, (*it).index);
            }
            if (!all) {
                mOfflineMetaBuffers.erase(it);
                break;
            }
        }
        if (all) {
            mOfflineMetaBuffers.clear();
        }
    }
    return rc;
}


/*===========================================================================
 * FUNCTION   : extractFrameCropAndRotation
 *
 * DESCRIPTION: Extract output crop and rotation if present and fill in the
 *              framework input frame for reprocessing
 *
 * PARAMETERS :
 *   @frame        : input frame from source stream
 *   @meta_buffer  : metadata buffer
 *   @jpeg_settings: jpeg settings containing the requested orientation
 *   @fwk_frame    : framework input frame to be filled in
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::extractFrameCropAndRotation(mm_camera_super_buf_t *frame,
        mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
        qcamera_fwk_input_pp_data_t &fwk_frame)
{
    if ((NULL == meta_buffer) || (NULL == frame) || (NULL == jpeg_settings)) {
        return BAD_VALUE;
    }

    metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
    if (NULL == meta) {
        return BAD_VALUE;
    }

    for (int i = 0; i < frame->num_bufs; i++) {
        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
        QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);

        if (pStream != NULL && pSrcStream != NULL) {
            // Find rotation info for reprocess stream
            if (jpeg_settings->jpeg_orientation == 0) {
                fwk_frame.reproc_config.rotation = ROTATE_0;
            } else if (jpeg_settings->jpeg_orientation == 90) {
                fwk_frame.reproc_config.rotation = ROTATE_90;
            } else if (jpeg_settings->jpeg_orientation == 180) {
                fwk_frame.reproc_config.rotation = ROTATE_180;
            } else if (jpeg_settings->jpeg_orientation == 270) {
                fwk_frame.reproc_config.rotation = ROTATE_270;
            }

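            // The crop metadata can carry one entry per backend stream;
            // match each entry against the source stream's server-side ID
            // to pick the crop that applies to this reprocess input.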
            // Find crop info for reprocess stream
            cam_crop_data_t *crop_data = (cam_crop_data_t *)
                POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, meta);
            if (NULL != crop_data) {
                for (int j = 0; j < crop_data->num_of_streams; j++) {
                    if (crop_data->crop_info[j].stream_id ==
                        pSrcStream->getMyServerID()) {
                        fwk_frame.reproc_config.output_crop =
                            crop_data->crop_info[j].crop;
                        CDBG("%s: Found offline reprocess crop %dx%d %dx%d",
                              __func__,
                              crop_data->crop_info[j].crop.left,
                              crop_data->crop_info[j].crop.top,
                              crop_data->crop_info[j].crop.width,
                              crop_data->crop_info[j].crop.height);
                        break;
                    }
                }
            }
            fwk_frame.input_buffer = *frame->bufs[i];
            fwk_frame.metadata_buffer = *meta_buffer;
            break;
        } else {
            ALOGE("%s: Source/Re-process streams are invalid", __func__);
            return BAD_VALUE;
        }
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : extractCrop
 *
 * DESCRIPTION: Extract framework output crop if present
 *
 * PARAMETERS :
 *   @frame     : input frame for reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::extractCrop(qcamera_fwk_input_pp_data_t *frame)
{
    if (NULL == frame) {
        ALOGE("%s: Incorrect input frame", __func__);
        return BAD_VALUE;
    }

    if (NULL == frame->metadata_buffer.buffer) {
        ALOGE("%s: No metadata available", __func__);
        return BAD_VALUE;
    }

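    // Framework reprocess metadata is expected to carry exactly one crop
    // entry; more than one is rejected, while a missing entry is tolerated
    // and leaves the output crop unchanged.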
    // Find crop info for reprocess stream
    metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
    if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, meta)) {
        cam_crop_data_t *crop_data = (cam_crop_data_t *)
                POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, meta);
        if (1 == crop_data->num_of_streams) {
            frame->reproc_config.output_crop = crop_data->crop_info[0].crop;
            CDBG("%s: Found offline reprocess crop %dx%d %dx%d", __func__,
                    crop_data->crop_info[0].crop.left,
                    crop_data->crop_info[0].crop.top,
                    crop_data->crop_info[0].crop.width,
                    crop_data->crop_info[0].crop.height);
        } else {
            ALOGE("%s: Incorrect number of offline crop data entries %d",
                    __func__,
                    crop_data->num_of_streams);
            return BAD_VALUE;
        }
    } else {
        CDBG_HIGH("%s: Crop data not present", __func__);
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : doReprocessOffline
 *
 * DESCRIPTION: request to do a reprocess on the frame
 *
 * PARAMETERS :
 *   @frame     : input frame for reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocessOffline(qcamera_fwk_input_pp_data_t *frame)
{
    int32_t rc = 0;
    OfflineBuffer mappedBuffer;

    if (m_numStreams < 1) {
        ALOGE("%s: No reprocess stream is created", __func__);
        return -1;
    }

    if (NULL == frame) {
        ALOGE("%s: Incorrect input frame", __func__);
        return BAD_VALUE;
    }

    if (NULL == frame->metadata_buffer.buffer) {
        ALOGE("%s: No metadata available", __func__);
        return BAD_VALUE;
    }

    if (NULL == frame->input_buffer.buffer) {
        ALOGE("%s: No input buffer available", __func__);
        return BAD_VALUE;
    }

    if ((0 == m_numStreams) || (NULL == mStreams[0])) {
        ALOGE("%s: Reprocess stream not initialized!", __func__);
        return NO_INIT;
    }

    QCamera3Stream *pStream = mStreams[0];
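    // Offline input buffers are mapped with indices that wrap within
    // [0, MAX_INFLIGHT_REQUESTS - 1], while offline metadata buffers wrap
    // within [MAX_INFLIGHT_REQUESTS, 2 * MAX_INFLIGHT_REQUESTS - 1], so the
    // two mapping ranges never collide.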
    int32_t max_idx = MAX_INFLIGHT_REQUESTS - 1;
    // loop back the indices if max burst count reached
    if (mOfflineBuffersIndex == max_idx) {
        mOfflineBuffersIndex = -1;
    }
    uint32_t buf_idx = mOfflineBuffersIndex + 1;

    rc = pStream->mapBuf(
            CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
            buf_idx, -1,
            frame->input_buffer.fd, frame->input_buffer.frame_len);
    if (NO_ERROR == rc) {
        mappedBuffer.index = buf_idx;
        mappedBuffer.stream = pStream;
        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
        mOfflineBuffers.push_back(mappedBuffer);
        mOfflineBuffersIndex = buf_idx;
        CDBG("%s: Mapped buffer with index %d", __func__, mOfflineBuffersIndex);
    }

    max_idx = MAX_INFLIGHT_REQUESTS * 2 - 1;
    // loop back the indices if max burst count reached
    if (mOfflineMetaIndex == max_idx) {
        mOfflineMetaIndex = MAX_INFLIGHT_REQUESTS - 1;
    }
    uint32_t meta_buf_idx = mOfflineMetaIndex + 1;

    rc |= pStream->mapBuf(
            CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
            meta_buf_idx, -1,
            frame->metadata_buffer.fd, frame->metadata_buffer.frame_len);
    if (NO_ERROR == rc) {
        mappedBuffer.index = meta_buf_idx;
        mappedBuffer.stream = pStream;
        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
        mOfflineMetaBuffers.push_back(mappedBuffer);
        mOfflineMetaIndex = meta_buf_idx;
        CDBG("%s: Mapped meta buffer with index %d", __func__, mOfflineMetaIndex);
    }

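    // With both buffers mapped, issue a DO_REPROCESS stream parameter that
    // ties the input buffer, its metadata buffer and the requested
    // rotation/crop together and hands the frame to the backend for
    // reprocessing.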
    if (rc == NO_ERROR) {
        cam_stream_parm_buffer_t param;
        memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
        param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
        param.reprocess.buf_index = buf_idx;
        param.reprocess.frame_idx = frame->input_buffer.frame_idx;
        param.reprocess.meta_present = 1;
        param.reprocess.meta_buf_index = meta_buf_idx;
        param.reprocess.frame_pp_config.rotation = frame->reproc_config.rotation;
        param.reprocess.frame_pp_config.crop.input_crop = frame->reproc_config.output_crop;
        param.reprocess.frame_pp_config.crop.crop_enabled = 1;
        rc = pStream->setParameter(param);
        if (rc != NO_ERROR) {
            ALOGE("%s: stream setParameter for reprocess failed", __func__);
        }
    } else {
        ALOGE("%s: Input buffer memory map failed: %d", __func__, rc);
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : doReprocess
 *
 * DESCRIPTION: request to do a reprocess on the frame
 *
 * PARAMETERS :
 *   @buf_fd     : fd to the input buffer that needs reprocess
 *   @buf_length : length of the input buffer
 *   @ret_val    : result of reprocess.
 *                 Example: Could be faceID in case of register face image.
 *   @meta_frame : metadata frame that corresponds to the input buffer
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
                                              uint32_t buf_length,
                                              int32_t &ret_val,
                                              mm_camera_super_buf_t *meta_frame)
{
    int32_t rc = 0;
    if (m_numStreams < 1) {
        ALOGE("%s: No reprocess stream is created", __func__);
        return -1;
    }
    if (meta_frame == NULL) {
        ALOGE("%s: Did not get corresponding metadata in time", __func__);
        return -1;
    }

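    // Map the input buffer into each reprocess stream, issue a DO_REPROCESS
    // parameter that references the metadata channel's buffer, then unmap
    // the input buffer once the parameter call returns.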
    uint32_t buf_idx = 0;
    for (int i = 0; i < m_numStreams; i++) {
        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
                                 buf_idx, -1,
                                 buf_fd, buf_length);

        if (rc == NO_ERROR) {
            cam_stream_parm_buffer_t param;
            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
            param.reprocess.buf_index = buf_idx;
            param.reprocess.meta_present = 1;
            param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
            param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
            rc = mStreams[i]->setParameter(param);
            if (rc == NO_ERROR) {
                ret_val = param.reprocess.ret_val;
            }
            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
                                  buf_idx, -1);
        }
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : addReprocStreamsFromSource
 *
 * DESCRIPTION: add reprocess streams from input source channel
 *
 * PARAMETERS :
 *   @pp_config      : pp feature configuration
 *   @src_config     : source reprocess configuration
 *   @is_type        : image stabilization type for the stream
 *   @pMetaChannel   : ptr to metadata channel to get corresp. metadata
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
        const reprocess_config_t &src_config, cam_is_type_t is_type,
        QCamera3Channel *pMetaChannel)
{
    int32_t rc = 0;
    cam_stream_reproc_config_t reprocess_config;
    cam_stream_type_t streamType;

    /* Up to MAX_INFLIGHT_REQUESTS requests can be queued up at a time, so
     * allocate the same number of output buffers for the reprocess channel. */
    int num_buffers = MAX_INFLIGHT_REQUESTS;
    cam_dimension_t streamDim = src_config.output_stream_dim;

    if (NULL != src_config.src_channel) {
        QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
        if (pSrcStream == NULL) {
            ALOGE("%s: source channel doesn't have a stream", __func__);
            return BAD_VALUE;
        }
        mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
    }

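    // Configure an offline reprocess stream: the input format, dimensions
    // and plane layout are taken from the source stream configuration,
    // while the output dimension comes from src_config.output_stream_dim.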
    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;

    reprocess_config.offline.input_fmt = src_config.stream_format;
    reprocess_config.offline.input_dim = src_config.input_stream_dim;
    reprocess_config.offline.input_buf_planes.plane_info =
            src_config.input_stream_plane_info.plane_info;
    reprocess_config.offline.num_of_bufs = num_buffers;
    reprocess_config.offline.input_type = src_config.stream_type;

    reprocess_config.pp_feature_config = pp_config;
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
            m_handle,
            m_camOps,
            mPaddingInfo,
            (QCamera3Channel*)this);
    if (pStream == NULL) {
        ALOGE("%s: No mem for Stream", __func__);
        return NO_MEMORY;
    }

    rc = pStream->init(streamType, src_config.stream_format,
            streamDim, ROTATE_0, &reprocess_config,
            num_buffers,
            reprocess_config.pp_feature_config.feature_mask,
            is_type,
            QCamera3Channel::streamCbRoutine, this);

    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        ALOGE("%s: failed to create reprocess stream", __func__);
        delete pStream;
    }

    if (rc == NO_ERROR) {
        m_pSrcChannel = src_config.src_channel;
        m_pMetaChannel = pMetaChannel;
    }
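    // Request one super buffer from the backend; the reprocessed output is
    // expected to be delivered through the channel callback.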
    if (m_camOps->request_super_buf(m_camHandle, m_handle, 1, 0) < 0) {
        ALOGE("%s: Request for super buffer failed", __func__);
    }
    return rc;
}

cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};

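/*===========================================================================
 * FUNCTION   : QCamera3SupportChannel
 *
 * DESCRIPTION: constructor of QCamera3SupportChannel
 *
 * PARAMETERS :
 *   @cam_handle : camera handle
 *   @cam_ops    : ptr to camera ops table
 *   @paddingInfo: padding information for the stream
 *   @postprocess_mask : feature mask for postprocessing
 *   @userData   : user data pointer
 *
 * RETURN     : none
 *==========================================================================*/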
QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
                    mm_camera_ops_t *cam_ops,
                    cam_padding_info_t *paddingInfo,
                    uint32_t postprocess_mask,
                    void *userData) :
                        QCamera3Channel(cam_handle, cam_ops,
                                NULL, paddingInfo, postprocess_mask, userData),
                        mMemory(NULL)
{
}

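/*===========================================================================
 * FUNCTION   : ~QCamera3SupportChannel
 *
 * DESCRIPTION: destructor of QCamera3SupportChannel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/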
QCamera3SupportChannel::~QCamera3SupportChannel()
{
    if (m_bIsActive)
        stop();

    if (mMemory) {
        mMemory->deallocate();
        delete mMemory;
        mMemory = NULL;
    }
}

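/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: initialize the support channel and add its single
 *              CALLBACK stream (hardcoded to VGA via kDim)
 *
 * PARAMETERS :
 *   @isType : image stabilization type for the stream
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/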
int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
{
    int32_t rc;

    if (mMemory || m_numStreams > 0) {
        ALOGE("%s: support channel already initialized", __func__);
        return -EINVAL;
    }

    rc = init(NULL, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
        return rc;
    }
    mIsType = isType;
    // Hardcode to VGA size for now
    rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
            CAM_FORMAT_YUV_420_NV21, kDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM,
            mPostProcMask, mIsType);
    if (rc < 0) {
        ALOGE("%s: addStream failed", __func__);
    }
    return rc;
}

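/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: no-op for the support channel; always reports success
 *
 * PARAMETERS :
 *   @buffer      : unused
 *   @frameNumber : unused
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *==========================================================================*/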
int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
                                                uint32_t /*frameNumber*/)
{
    return NO_ERROR;
}

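/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: stream callback; the frame is not consumed, its buffer is
 *              returned to the stream immediately
 *
 * PARAMETERS :
 *   @super_frame : super buffer containing the frame
 *   @stream      : stream the frame came from (unused)
 *
 * RETURN     : none
 *==========================================================================*/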
void QCamera3SupportChannel::streamCbRoutine(
                        mm_camera_super_buf_t *super_frame,
                        QCamera3Stream * /*stream*/)
{
    if (super_frame == NULL || super_frame->num_bufs != 1) {
        ALOGE("%s: super_frame is not valid", __func__);
        return;
    }
    bufDone(super_frame);
    free(super_frame);
}

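/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION: allocate heap buffers for the support channel stream
 *
 * PARAMETERS :
 *   @len : size of each buffer in bytes
 *
 * RETURN     : ptr to the allocated memory on success, NULL on failure
 *==========================================================================*/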
QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
{
    int rc;

    mMemory = new QCamera3HeapMemory();
    if (!mMemory) {
        ALOGE("%s: unable to create heap memory", __func__);
        return NULL;
    }
    rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
    if (rc < 0) {
        ALOGE("%s: unable to allocate heap memory", __func__);
        delete mMemory;
        mMemory = NULL;
        return NULL;
    }
    return mMemory;
}

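/*===========================================================================
 * FUNCTION   : putStreamBufs
 *
 * DESCRIPTION: release the buffers of the support channel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/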
void QCamera3SupportChannel::putStreamBufs()
{
    mMemory->deallocate();
    delete mMemory;
    mMemory = NULL;
}

}; // namespace qcamera