1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 
31 #define LOG_TAG "QCamera3Channel"
32 
33 // To remove
34 #include <cutils/properties.h>
35 
36 // System dependencies
37 #include <fcntl.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include "hardware/gralloc.h"
41 #include <utils/Timers.h>
42 #include <sys/stat.h>
43 
44 // Camera dependencies
45 #include "QCamera3Channel.h"
46 #include "QCamera3HWI.h"
47 #include "QCameraTrace.h"
48 #include "QCameraFormat.h"
49 extern "C" {
50 #include "mm_camera_dbg.h"
51 }
52 
53 using namespace android;
54 
55 namespace qcamera {
56 #define IS_BUFFER_ERROR(x) (((x) & V4L2_BUF_FLAG_ERROR) == V4L2_BUF_FLAG_ERROR)
57 
58 /*===========================================================================
59  * FUNCTION   : QCamera3Channel
60  *
61  * DESCRIPTION: constructor of QCamera3Channel
62  *
63  * PARAMETERS :
64  *   @cam_handle : camera handle
65  *   @cam_ops    : ptr to camera ops table
66  *
67  * RETURN     : none
68  *==========================================================================*/
69 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
70                                uint32_t channel_handle,
71                                mm_camera_ops_t *cam_ops,
72                                channel_cb_routine cb_routine,
73                                channel_cb_buffer_err cb_buffer_err,
74                                cam_padding_info_t *paddingInfo,
75                                cam_feature_mask_t postprocess_mask,
76                                void *userData, uint32_t numBuffers)
77 {
78     m_camHandle = cam_handle;
79     m_handle = channel_handle;
80     m_camOps = cam_ops;
81     m_bIsActive = false;
82     m_bUBWCenable = true;
83 
84     m_numStreams = 0;
85     memset(mStreams, 0, sizeof(mStreams));
86     mUserData = userData;
87 
88     mStreamInfoBuf = NULL;
89     mChannelCB = cb_routine;
90     mChannelCbBufErr = cb_buffer_err;
91     mPaddingInfo = *paddingInfo;
92     mPaddingInfo.offset_info.offset_x = 0;
93     mPaddingInfo.offset_info.offset_y = 0;
94 
95     mPostProcMask = postprocess_mask;
96 
97     mIsType = IS_TYPE_NONE;
98     mNumBuffers = numBuffers;
99     mPerFrameMapUnmapEnable = true;
100     mDumpFrmCnt = 0;
101     mNRMode = 0;
102 
103     mYUVDump = property_get_int32("persist.camera.dumpimg", 0);
104     mMapStreamBuffers = mYUVDump;
105 }
106 
107 /*===========================================================================
108  * FUNCTION   : ~QCamera3Channel
109  *
110  * DESCRIPTION: destructor of QCamera3Channel
111  *
112  * PARAMETERS : none
113  *
114  * RETURN     : none
115  *==========================================================================*/
116 QCamera3Channel::~QCamera3Channel()
117 {
118 }
119 
120 /*===========================================================================
121  * FUNCTION   : destroy
122  *
123  * DESCRIPTION: internal destructor of QCamera3Channel called by the subclasses;
124  *              this destructor will call pure virtual functions.  stop will eventually call
125  *              QCamera3Stream::putBufs.  The putBufs function will
126  *              call QCamera3Channel::putStreamBufs, which is pure virtual
127  *
128  * PARAMETERS : none
129  *
130  * RETURN     : none
131  *==========================================================================*/
132 void QCamera3Channel::destroy()
133 {
134     if (m_bIsActive)
135         stop();
136 
137     for (uint32_t i = 0; i < m_numStreams; i++) {
138         if (mStreams[i] != NULL) {
139             delete mStreams[i];
140             mStreams[i] = 0;
141         }
142     }
143     m_numStreams = 0;
144 }
145 
146 /*===========================================================================
147  * FUNCTION   : addStream
148  *
149  * DESCRIPTION: add a stream into channel
150  *
151  * PARAMETERS :
152  *   @streamType     : stream type
153  *   @streamFormat   : stream format
154  *   @streamDim      : stream dimension
155  *   @streamRotation : rotation of the stream
156  *   @minStreamBufNum : minimum buffer count for the particular stream type
157  *   @postprocessMask : post-process feature mask
158  *   @isType         : type of image stabilization required on the stream
159  *
160  * RETURN     : int32_t type of status
161  *              NO_ERROR  -- success
162  *              non-zero failure code
163  *==========================================================================*/
164 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
165                                   cam_format_t streamFormat,
166                                   cam_dimension_t streamDim,
167                                   cam_rotation_t streamRotation,
168                                   uint8_t minStreamBufNum,
169                                   cam_feature_mask_t postprocessMask,
170                                   cam_is_type_t isType,
171                                   uint32_t batchSize)
172 {
173     int32_t rc = NO_ERROR;
174 
175     if (m_numStreams >= 1) {
176         LOGE("Only one stream per channel supported in v3 Hal");
177         return BAD_VALUE;
178     }
179 
180     if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
181         LOGE("stream number (%d) exceeds max limit (%d)",
182                m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
183         return BAD_VALUE;
184     }
185     QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
186                                                m_handle,
187                                                m_camOps,
188                                                &mPaddingInfo,
189                                                this,
190                                                mMapStreamBuffers);
191     if (pStream == NULL) {
192         LOGE("No mem for Stream");
193         return NO_MEMORY;
194     }
195     LOGD("batch size is %d", batchSize);
196 
197     rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
198             NULL, minStreamBufNum, postprocessMask, isType, batchSize,
199             streamCbRoutine, this);
200     if (rc == 0) {
201         mStreams[m_numStreams] = pStream;
202         m_numStreams++;
203     } else {
204         delete pStream;
205     }
206     return rc;
207 }
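
/*===========================================================================
 * ILLUSTRATION : typical channel lifecycle (a minimal sketch based on the
 *                methods in this file; "channel" stands for a concrete
 *                QCamera3Channel subclass instance and the stream arguments
 *                are placeholders)
 *
 *   // rc = channel->addStream(streamType, streamFormat, streamDim,
 *   //         streamRotation, minStreamBufNum, postprocessMask, isType,
 *   //         batchSize);        // only one stream per channel in the v3 HAL
 *   // rc = channel->start();     // starts every stream added to the channel
 *   // ...                        // frames arrive through streamCbRoutine()
 *   // rc = channel->stop();      // stops the streams, channel becomes inactive
 *   // destroy() is invoked from the subclass destructor and deletes the streams
 *==========================================================================*/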
208 
209 /*===========================================================================
210  * FUNCTION   : start
211  *
212  * DESCRIPTION: start channel, which will start all streams belonging to this channel
213  *
214  * PARAMETERS :
215  *
216  * RETURN     : int32_t type of status
217  *              NO_ERROR  -- success
218  *              non-zero failure code
219  *==========================================================================*/
220 int32_t QCamera3Channel::start()
221 {
222     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_START);
223     int32_t rc = NO_ERROR;
224 
225     if (m_numStreams > 1) {
226         LOGW("bundle not supported");
227     } else if (m_numStreams == 0) {
228         return NO_INIT;
229     }
230 
231     if(m_bIsActive) {
232         LOGW("Attempt to start active channel");
233         return rc;
234     }
235 
236     for (uint32_t i = 0; i < m_numStreams; i++) {
237         if (mStreams[i] != NULL) {
238             mStreams[i]->start();
239         }
240     }
241 
242     m_bIsActive = true;
243 
244     return rc;
245 }
246 
247 /*===========================================================================
248  * FUNCTION   : stop
249  *
250  * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
251  *
252  * PARAMETERS : none
253  *
254  * RETURN     : int32_t type of status
255  *              NO_ERROR  -- success
256  *              non-zero failure code
257  *==========================================================================*/
258 int32_t QCamera3Channel::stop()
259 {
260     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_STOP);
261     int32_t rc = NO_ERROR;
262     if(!m_bIsActive) {
263         LOGE("Attempt to stop inactive channel");
264         return rc;
265     }
266 
267     for (uint32_t i = 0; i < m_numStreams; i++) {
268         if (mStreams[i] != NULL) {
269             mStreams[i]->stop();
270         }
271     }
272 
273     m_bIsActive = false;
274     return rc;
275 }
276 
277 /*===========================================================================
278  * FUNCTION   : setBatchSize
279  *
280  * DESCRIPTION: Set batch size for the channel. This is a dummy implementation
281  *              for the base class
282  *
283  * PARAMETERS :
284  *   @batchSize  : Number of image buffers in a batch
285  *
286  * RETURN     : int32_t type of status
287  *              NO_ERROR  -- success always
288  *              non-zero failure code
289  *==========================================================================*/
290 int32_t QCamera3Channel::setBatchSize(uint32_t batchSize)
291 {
292     LOGD("Dummy method. batchSize: %d unused ", batchSize);
293     return NO_ERROR;
294 }
295 
296 /*===========================================================================
297  * FUNCTION   : queueBatchBuf
298  *
299  * DESCRIPTION: This is a dummy implementation for the base class
300  *
301  * PARAMETERS :
302  *
303  * RETURN     : int32_t type of status
304  *              NO_ERROR  -- success always
305  *              non-zero failure code
306  *==========================================================================*/
307 int32_t QCamera3Channel::queueBatchBuf()
308 {
309     LOGD("Dummy method. Unused ");
310     return NO_ERROR;
311 }
312 
313 /*===========================================================================
314  * FUNCTION   : setPerFrameMapUnmap
315  *
316  * DESCRIPTION: Sets internal enable flag
317  *
318  * PARAMETERS :
319  *  @enable : Bool value for the enable flag
320  *
321  * RETURN     : int32_t type of status
322  *              NO_ERROR  -- success always
323  *              non-zero failure code
324  *==========================================================================*/
325 int32_t QCamera3Channel::setPerFrameMapUnmap(bool enable)
326 {
327     mPerFrameMapUnmapEnable = enable;
328     return NO_ERROR;
329 }
330 
331 /*===========================================================================
332  * FUNCTION   : flush
333  *
334  * DESCRIPTION: flush a channel
335  *
336  * PARAMETERS : none
337  *
338  * RETURN     : int32_t type of status
339  *              NO_ERROR  -- success
340  *              non-zero failure code
341  *==========================================================================*/
342 int32_t QCamera3Channel::flush()
343 {
344     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_FLUSH);
345     return NO_ERROR;
346 }
347 
348 /*===========================================================================
349  * FUNCTION   : bufDone
350  *
351  * DESCRIPTION: return a stream buf back to kernel
352  *
353  * PARAMETERS :
354  *   @recvd_frame  : stream buf frame to be returned
355  *
356  * RETURN     : int32_t type of status
357  *              NO_ERROR  -- success
358  *              non-zero failure code
359  *==========================================================================*/
360 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
361 {
362     int32_t rc = NO_ERROR;
363     for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
364          if (recvd_frame->bufs[i] != NULL) {
365              for (uint32_t j = 0; j < m_numStreams; j++) {
366                  if (mStreams[j] != NULL &&
367                      mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
368                      rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
369                      break; // break loop j
370                  }
371              }
372          }
373     }
374 
375     return rc;
376 }
377 
378 int32_t QCamera3Channel::setBundleInfo(const cam_bundle_config_t &bundleInfo)
379 {
380     int32_t rc = NO_ERROR;
381     cam_stream_parm_buffer_t param;
382     memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
383     param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
384     param.bundleInfo = bundleInfo;
385     if (m_numStreams > 0 && mStreams[0]) {
386         rc = mStreams[0]->setParameter(param);
387         if (rc != NO_ERROR) {
388             LOGE("stream setParameter for set bundle failed");
389         }
390     }
391     return rc;
392 }
393 
394 /*===========================================================================
395  * FUNCTION   : getStreamTypeMask
396  *
397  * DESCRIPTION: Get bit mask of all stream types in this channel
398  *
399  * PARAMETERS : None
400  *
401  * RETURN     : Bit mask of all stream types in this channel
402  *==========================================================================*/
403 uint32_t QCamera3Channel::getStreamTypeMask()
404 {
405     uint32_t mask = 0;
406     for (uint32_t i = 0; i < m_numStreams; i++) {
407        mask |= (1U << mStreams[i]->getMyType());
408     }
409     return mask;
410 }
411 
412 /*===========================================================================
413  * FUNCTION   : getStreamID
414  *
415  * DESCRIPTION: Get StreamID of requested stream type
416  *
417  * PARAMETERS : streamMask
418  *
419  * RETURN     : Stream ID
420  *==========================================================================*/
421 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
422 {
423     uint32_t streamID = 0;
424     for (uint32_t i = 0; i < m_numStreams; i++) {
425         if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
426             streamID = mStreams[i]->getMyServerID();
427             break;
428         }
429     }
430     return streamID;
431 }
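
/*===========================================================================
 * ILLUSTRATION : relation between getStreamTypeMask() and getStreamID()
 *                (a minimal sketch; "type" stands for any cam_stream_type_t
 *                value of a stream added to this channel)
 *
 *   // uint32_t mask = channel->getStreamTypeMask();  // one bit per stream: 1U << type
 *   // uint32_t id   = channel->getStreamID(1U << type);
 *   // "id" is the server ID of the matching stream, or 0 if the channel has
 *   // no stream of that type
 *==========================================================================*/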
432 
433 /*===========================================================================
434  * FUNCTION   : getStreamByHandle
435  *
436  * DESCRIPTION: return stream object by stream handle
437  *
438  * PARAMETERS :
439  *   @streamHandle : stream handle
440  *
441  * RETURN     : stream object. NULL if not found
442  *==========================================================================*/
443 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
444 {
445     for (uint32_t i = 0; i < m_numStreams; i++) {
446         if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
447             return mStreams[i];
448         }
449     }
450     return NULL;
451 }
452 
453 /*===========================================================================
454  * FUNCTION   : getStreamByIndex
455  *
456  * DESCRIPTION: return stream object by index
457  *
458  * PARAMETERS :
459  *   @index : index of the stream within the channel
460  *
461  * RETURN     : stream object. NULL if not found
462  *==========================================================================*/
463 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint32_t index)
464 {
465     if (index < m_numStreams) {
466         return mStreams[index];
467     }
468     return NULL;
469 }
470 
471 /*===========================================================================
472  * FUNCTION   : streamCbRoutine
473  *
474  * DESCRIPTION: static callback routine registered with the stream; forwards
475  *              the filled super frame to the owning QCamera3Channel instance
476  *
477  * PARAMETERS : @super_frame : the super frame with filled buffer
478  *              @stream : stream on which the buffer was filled; @userdata : channel instance
479  * RETURN     : none
480  *==========================================================================*/
481 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
482                 QCamera3Stream *stream, void *userdata)
483 {
484     QCamera3Channel *channel = (QCamera3Channel *)userdata;
485     if (channel == NULL) {
486         LOGE("invalid channel pointer");
487         return;
488     }
489     channel->streamCbRoutine(super_frame, stream);
490 }
491 
492 /*===========================================================================
493  * FUNCTION   : dumpYUV
494  *
495  * DESCRIPTION: function to dump the YUV data from ISP/pproc
496  *
497  * PARAMETERS :
498  *   @frame   : frame to be dumped
499  *   @dim     : dimension of the stream
500  *   @offset  : offset of the data
501  *   @dump_type : QCAMERA_DUMP_FRM_* flag identifying which frame type is being dumped
502  *
503  * RETURN  : none
504  *==========================================================================*/
505 void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
506         cam_frame_len_offset_t offset, uint8_t dump_type)
507 {
508     char buf[FILENAME_MAX];
509     memset(buf, 0, sizeof(buf));
510     static int counter = 0;
511     if (mYUVDump & dump_type) {
512         mFrmNum = ((mYUVDump & 0xffff0000) >> 16);
513         if (mFrmNum == 0) {
514             mFrmNum = 10;
515         }
516         if (mFrmNum > 256) {
517             mFrmNum = 256;
518         }
519         mSkipMode = ((mYUVDump & 0x0000ff00) >> 8);
520         if (mSkipMode == 0) {
521             mSkipMode = 1;
522         }
523         if (mDumpSkipCnt == 0) {
524             mDumpSkipCnt = 1;
525         }
526         if (mDumpSkipCnt % mSkipMode == 0) {
527             if (mDumpFrmCnt < mFrmNum) {
528                 /* Note that the image dimension will be the unrotated stream dimension.
529                 * If you feel that the image would have been rotated during reprocess
530                 * then swap the dimensions while opening the file
531                 * */
532                 switch (dump_type) {
533                     case QCAMERA_DUMP_FRM_PREVIEW:
534                         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"p_%d_%d_%dx%d.yuv",
535                             counter, frame->frame_idx, dim.width, dim.height);
536                     break;
537                     case QCAMERA_DUMP_FRM_VIDEO:
538                         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"v_%d_%d_%dx%d.yuv",
539                             counter, frame->frame_idx, dim.width, dim.height);
540                     break;
541                     case QCAMERA_DUMP_FRM_INPUT_JPEG:
542                         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.yuv",
543                             counter, frame->frame_idx, dim.width, dim.height);
544                     break;
545                     case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
546                         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"ir_%d_%d_%dx%d.yuv",
547                             counter, frame->frame_idx, dim.width, dim.height);
548                     break;
549                     case QCAMERA_DUMP_FRM_CALLBACK:
550                         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"c_%d_%d_%dx%d.yuv",
551                             counter, frame->frame_idx, dim.width, dim.height);
552                     break;
553                     case QCAMERA_DUMP_FRM_OUTPUT_JPEG:
554                         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"j_%d_%d_%dx%d.jpg",
555                             counter, frame->frame_idx, dim.width, dim.height);
556                     break;
557                     default :
558                         LOGE("dumping not enabled for stream type %d",dump_type);
559                     break;
560                 }
561                 counter++;
562                 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
563                 ssize_t written_len = 0;
564                 if (file_fd >= 0) {
565                     void *data = NULL;
566                     fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
567                     if( dump_type == QCAMERA_DUMP_FRM_OUTPUT_JPEG ) {
568                         written_len = write(file_fd, frame->buffer, frame->frame_len);
569                     }
570                     else {
571                         for (uint32_t i = 0; i < offset.num_planes; i++) {
572                             uint32_t index = offset.mp[i].offset;
573                             if (i > 0) {
574                                 index += offset.mp[i-1].len;
575                             }
576                             for (int j = 0; j < offset.mp[i].height; j++) {
577                                 data = (void *)((uint8_t *)frame->buffer + index);
578                                 written_len += write(file_fd, data,
579                                         (size_t)offset.mp[i].width);
580                                 index += (uint32_t)offset.mp[i].stride;
581                             }
582                         }
583                     }
584                     LOGH("written number of bytes %ld\n", written_len);
585                     mDumpFrmCnt++;
586                     frame->cache_flags |= CPU_HAS_READ;
587                     close(file_fd);
588                 } else {
589                     LOGE("failed to open file to dump image");
590                 }
591             }
592         } else {
593             mDumpSkipCnt++;
594         }
595     }
596 }
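
/*===========================================================================
 * ILLUSTRATION : layout of persist.camera.dumpimg as decoded by dumpYUV()
 *                above (a sketch; the numeric values of the QCAMERA_DUMP_FRM_*
 *                flags are defined elsewhere)
 *
 *   bits [31:16] : number of frames to dump (0 means 10, capped at 256)
 *   bits [15: 8] : skip modulus applied to the internal skip counter (0 is treated as 1)
 *   bits [ 7: 0] : bitmask of QCAMERA_DUMP_FRM_* types to dump
 *
 *   // int32_t dumpimg = (0x20 << 16) | (0x02 << 8) | dumpTypeMask;
 *   // -> dump up to 0x20 frames of the selected types into files under
 *   //    QCAMERA_DUMP_FRM_LOCATION, gated by a skip modulus of 2
 *==========================================================================*/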
597 
598 /*===========================================================================
599  * FUNCTION   : isUBWCEnabled
600  *
601  * DESCRIPTION: Function to get UBWC hardware support.
602  *
603  * PARAMETERS : None
604  *
605  * RETURN     : TRUE -- UBWC format supported
606  *              FALSE -- UBWC is not supported.
607  *==========================================================================*/
608 bool QCamera3Channel::isUBWCEnabled()
609 {
610 #ifdef UBWC_PRESENT
611     char value[PROPERTY_VALUE_MAX];
612     int prop_value = 0;
613     memset(value, 0, sizeof(value));
614     property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
615     prop_value = atoi(value);
616     if (prop_value) {
617         return FALSE;
618     }
619 
620     //Disable UBWC if Eztune is enabled
621     //EzTune process CPP output frame and cannot understand UBWC.
622     memset(value, 0, sizeof(value));
623     property_get("persist.camera.eztune.enable", value, "0");
624     prop_value = atoi(value);
625     if (prop_value) {
626         return FALSE;
627     }
628     return TRUE;
629 #else
630     return FALSE;
631 #endif
632 }
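
/*===========================================================================
 * ILLUSTRATION : properties consulted by isUBWCEnabled() (both property names
 *                are taken from the code above; the values shown are examples)
 *
 *   // adb shell setprop debug.gralloc.gfx_ubwc_disable 1   -> UBWC reported off
 *   // adb shell setprop persist.camera.eztune.enable 1     -> UBWC reported off,
 *   //    since EzTune consumes the CPP output and cannot parse UBWC frames
 *==========================================================================*/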
633 
634 /*===========================================================================
635  * FUNCTION   : setUBWCEnabled
636  *
637  * DESCRIPTION: set UBWC enable
638  *
639  * PARAMETERS : UBWC enable value
640  *
641  * RETURN     : none
642  *
643  *==========================================================================*/
644 void QCamera3Channel::setUBWCEnabled(bool val)
645 {
646     m_bUBWCenable = val;
647 }
648 
649 /*===========================================================================
650  * FUNCTION   : getStreamDefaultFormat
651  *
652  * DESCRIPTION: return default buffer format for the stream
653  *
654  * PARAMETERS : type : Stream type
655  *
656  * RETURN     : format for stream type
657  *
658  *==========================================================================*/
659 cam_format_t QCamera3Channel::getStreamDefaultFormat(cam_stream_type_t type,
660         uint32_t width, uint32_t height, bool forcePreviewUBWC, cam_is_type_t isType)
661 {
662     cam_format_t streamFormat;
663 
664     switch (type) {
665     case CAM_STREAM_TYPE_PREVIEW:
666         if (isUBWCEnabled()) {
667 
668             char prop[PROPERTY_VALUE_MAX];
669             int pFormat;
670             memset(prop, 0, sizeof(prop));
671             property_get("persist.camera.preview.ubwc", prop, "1");
672             pFormat = atoi(prop);
673 
674             // When goog_zoom is linked to the preview stream, disable ubwc to preview
675             property_get("persist.camera.gzoom.at", prop, "0");
676             bool is_goog_zoom_preview_enabled = ((atoi(prop) & 2) > 0) && isType == IS_TYPE_EIS_3_0;
677 
678             if (pFormat == 1 && forcePreviewUBWC && !is_goog_zoom_preview_enabled) {
679                 streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
680             } else {
681                 /* Changed to macro to ensure format sent to gralloc for preview
682                 is also changed if the preview format is changed at camera HAL */
683                 streamFormat = PREVIEW_STREAM_FORMAT;
684             }
685         } else {
686             /* Changed to macro to ensure format sent to gralloc for preview
687             is also changed if the preview format is changed at camera HAL */
688             streamFormat = PREVIEW_STREAM_FORMAT;
689         }
690         break;
691     case CAM_STREAM_TYPE_VIDEO:
692     {
693         /* Disable UBWC for smaller video resolutions due to CPP downscale
694             limits. Refer cpp_hw_params.h::CPP_DOWNSCALE_LIMIT_UBWC */
695         if (isUBWCEnabled() && (width >= 640) && (height >= 480)) {
696             // When goog_zoom is linked to the video stream, disable ubwc to video
697             char prop[PROPERTY_VALUE_MAX];
698             property_get("persist.camera.gzoom.at", prop, "0");
699             bool is_goog_zoom_video_enabled = ((atoi(prop) & 1) > 0) && isType == IS_TYPE_EIS_3_0;
700 
701             property_get("persist.camera.gzoom.4k", prop, "0");
702             bool is_goog_zoom_4k_enabled = (atoi(prop) > 0);
703             bool is_4k_video = (width >= 3840 && height >= 2160);
704 
705             if ((QCameraCommon::isVideoUBWCEnabled()) && (!is_goog_zoom_video_enabled
706                     || (is_4k_video && !is_goog_zoom_4k_enabled))) {
707                 streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
708             } else {
709                 streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
710             }
711         } else {
712 #if VENUS_PRESENT
713         streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
714 #else
715         streamFormat = CAM_FORMAT_YUV_420_NV12;
716 #endif
717         }
718         break;
719     }
720     case CAM_STREAM_TYPE_SNAPSHOT:
721         streamFormat = CAM_FORMAT_YUV_420_NV21;
722         break;
723     case CAM_STREAM_TYPE_CALLBACK:
724         /* Changed to macro to ensure format sent to gralloc for callback
725         is also changed if the preview format is changed at camera HAL */
726         streamFormat = CALLBACK_STREAM_FORMAT;
727         break;
728     case CAM_STREAM_TYPE_RAW:
729         streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
730         break;
731     default:
732         streamFormat = CAM_FORMAT_YUV_420_NV21;
733         break;
734     }
735     return streamFormat;
736 }
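
/*===========================================================================
 * ILLUSTRATION : preview-format selection in getStreamDefaultFormat() (a
 *                condensed sketch of the CAM_STREAM_TYPE_PREVIEW branch above)
 *
 *   // UBWC supported
 *   //   && persist.camera.preview.ubwc == 1 (default)
 *   //   && forcePreviewUBWC
 *   //   && !(google zoom bound to preview with IS_TYPE_EIS_3_0)
 *   //       -> CAM_FORMAT_YUV_420_NV12_UBWC
 *   // otherwise
 *   //       -> PREVIEW_STREAM_FORMAT (macro kept in sync with the gralloc format)
 *==========================================================================*/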
737 
738 
739 /* QCamera3ProcessingChannel methods */
740 
741 /*===========================================================================
742  * FUNCTION   : QCamera3ProcessingChannel
743  *
744  * DESCRIPTION: constructor of QCamera3ProcessingChannel
745  *
746  * PARAMETERS :
747  *   @cam_handle : camera handle
748  *   @cam_ops    : ptr to camera ops table
749  *   @cb_routine : callback routine to frame aggregator
750  *   @paddingInfo: stream padding info
751  *   @userData   : HWI handle
752  *   @stream     : camera3_stream_t structure
753  *   @stream_type: Channel stream type
754  *   @postprocess_mask: the postprocess mask for streams of this channel
755  *   @metadataChannel: handle to the metadataChannel
756  *   @numBuffers : number of max dequeued buffers
757  * RETURN     : none
758  *==========================================================================*/
759 QCamera3ProcessingChannel::QCamera3ProcessingChannel(uint32_t cam_handle,
760         uint32_t channel_handle,
761         mm_camera_ops_t *cam_ops,
762         channel_cb_routine cb_routine,
763         channel_cb_buffer_err cb_buffer_err,
764         cam_padding_info_t *paddingInfo,
765         void *userData,
766         camera3_stream_t *stream,
767         cam_stream_type_t stream_type,
768         cam_feature_mask_t postprocess_mask,
769         QCamera3Channel *metadataChannel,
770         uint32_t numBuffers) :
771             QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine,
772                     cb_buffer_err, paddingInfo, postprocess_mask, userData, numBuffers),
773             m_postprocessor(this),
774             mFrameCount(0),
775             mLastFrameCount(0),
776             mLastFpsTime(0),
777             mMemory(numBuffers),
778             mCamera3Stream(stream),
779             mNumBufs(CAM_MAX_NUM_BUFS_PER_STREAM),
780             mStreamType(stream_type),
781             mPostProcStarted(false),
782             mReprocessType(REPROCESS_TYPE_NONE),
783             mInputBufferConfig(false),
784             m_pMetaChannel(metadataChannel),
785             mMetaFrame(NULL),
786             mOfflineMemory(0),
787             mOfflineMetaMemory(numBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1))
788 {
789     char prop[PROPERTY_VALUE_MAX];
790     property_get("persist.debug.sf.showfps", prop, "0");
791     mDebugFPS = (uint8_t) atoi(prop);
792 
793     int32_t rc = m_postprocessor.init(&mMemory);
794     if (rc != 0) {
795         LOGE("Init Postprocessor failed");
796     }
797 }
798 
799 /*===========================================================================
800  * FUNCTION   : ~QCamera3ProcessingChannel
801  *
802  * DESCRIPTION: destructor of QCamera3ProcessingChannel
803  *
804  * PARAMETERS : none
805  *
806  * RETURN     : none
807  *==========================================================================*/
808 QCamera3ProcessingChannel::~QCamera3ProcessingChannel()
809 {
810     destroy();
811 
812     int32_t rc = m_postprocessor.deinit();
813     if (rc != 0) {
814         LOGE("De-init Postprocessor failed");
815     }
816 
817     if (0 < mOfflineMetaMemory.getCnt()) {
818         mOfflineMetaMemory.deallocate();
819     }
820     if (0 < mOfflineMemory.getCnt()) {
821         mOfflineMemory.unregisterBuffers();
822     }
823 
824 }
825 
826 /*===========================================================================
827  * FUNCTION   : streamCbRoutine
828  *
829  * DESCRIPTION: stream callback routine that issues the framework buffer callback for the filled buffer
830  *
831  * PARAMETERS :
832  * @super_frame : the super frame with filled buffer
833  * @stream      : stream on which the buffer was requested and filled
834  *
835  * RETURN     : none
836  *==========================================================================*/
837 void QCamera3ProcessingChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
838         QCamera3Stream *stream)
839 {
840     if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
841         KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PREVIEW_STRM_CB);
842     } else {
843         ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_STRM_CB);
844     }
845     //FIXME Q Buf back in case of error?
846     uint8_t frameIndex;
847     buffer_handle_t *resultBuffer;
848     int32_t resultFrameNumber;
849     camera3_stream_buffer_t result;
850     cam_dimension_t dim;
851     cam_frame_len_offset_t offset;
852 
853     memset(&dim, 0, sizeof(dim));
854     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
855     if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
856         LOGE("Error with the stream callback");
857         return;
858     }
859 
860     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
861     if(frameIndex >= mNumBufs) {
862          LOGE("Error, Invalid index for buffer");
863          stream->bufDone(frameIndex);
864          return;
865     }
866 
867     if (mDebugFPS) {
868         showDebugFPS(stream->getMyType());
869     }
870     stream->getFrameDimension(dim);
871     stream->getFrameOffset(offset);
872     if (stream->getMyType() == CAM_STREAM_TYPE_PREVIEW) {
873         dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_PREVIEW);
874     } else if (stream->getMyType() == CAM_STREAM_TYPE_VIDEO) {
875         dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_VIDEO);
876     } else if (stream->getMyType() == CAM_STREAM_TYPE_CALLBACK) {
877         dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_CALLBACK);
878     }
879 
880     do {
881 
882        //Use below data to issue framework callback
883        resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
884        resultFrameNumber = mMemory.getFrameNumber(frameIndex);
885        uint32_t oldestBufIndex;
886        int32_t lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
887        QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
888        if ((lowestFrameNumber != -1 ) && (lowestFrameNumber < resultFrameNumber) &&
889             hal_obj->mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) {
890            LOGE("Error buffer dropped for framenumber:%d with bufidx:%d",
891                    lowestFrameNumber, oldestBufIndex);
892            if (mOutOfSequenceBuffers.empty()) {
893               stream->cancelBuffer(oldestBufIndex);
894            }
895 
896            //push in order!
897            auto itr = mOutOfSequenceBuffers.begin();
898            for (; itr != mOutOfSequenceBuffers.end(); itr++) {
899                mm_camera_super_buf_t *super_buf = *itr;
900                uint32_t buf_idx = super_buf->bufs[0]->buf_idx;
901                int32_t frame_num = mMemory.getFrameNumber(buf_idx);
902                if (resultFrameNumber < frame_num) {
903                    LOGE("Out of order frame!! set buffer status error flag!");
904                    mOutOfSequenceBuffers.insert(itr, super_frame);
905                    super_buf->bufs[0]->flags |= V4L2_BUF_FLAG_ERROR;
906                    break;
907                }
908            }
909 
910            if (itr == mOutOfSequenceBuffers.end()) {
911                LOGE("Add the frame to the end of mOutOfSequenceBuffers");
912                // add the buffer
913                mOutOfSequenceBuffers.push_back(super_frame);
914            }
915            return;
916        }
917 
918        if(hal_obj->mStreamConfig == true) {
919           switch (stream->getMyType()) {
920               case CAM_STREAM_TYPE_PREVIEW:
921                   LOGH("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
922                   break;
923               case CAM_STREAM_TYPE_VIDEO:
924                   LOGH("[KPI Perf] : PROFILE_FIRST_VIDEO_FRAME");
925                   break;
926               default:
927                   break;
928           }
929           hal_obj->mStreamConfig = false;
930        }
931 
932        result.stream = mCamera3Stream;
933        result.buffer = resultBuffer;
934        if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
935            result.status = CAMERA3_BUFFER_STATUS_ERROR;
936            LOGW("CAMERA3_BUFFER_STATUS_ERROR for stream_type: %d",
937                    mStreams[0]->getMyType());
938            mChannelCbBufErr(this, resultFrameNumber, CAMERA3_BUFFER_STATUS_ERROR, mUserData);
939        } else {
940            result.status = CAMERA3_BUFFER_STATUS_OK;
941        }
942        result.acquire_fence = -1;
943        result.release_fence = -1;
944        if(mPerFrameMapUnmapEnable) {
945            int32_t rc = stream->bufRelease(frameIndex);
946            if (NO_ERROR != rc) {
947                LOGE("Error %d releasing stream buffer %d",
948                         rc, frameIndex);
949            }
950 
951            rc = mMemory.unregisterBuffer(frameIndex);
952            if (NO_ERROR != rc) {
953                LOGE("Error %d unregistering stream buffer %d",
954                         rc, frameIndex);
955            }
956        }
957 
958        if (0 <= resultFrameNumber) {
959            if (mChannelCB) {
960                mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
961            }
962        } else {
963            LOGE("Bad frame number");
964        }
965        free(super_frame);
966        super_frame = NULL;
967        if (mOutOfSequenceBuffers.empty()) {
968           break;
969        } else {
970             auto itr = mOutOfSequenceBuffers.begin();
971             super_frame = *itr;
972             frameIndex = super_frame->bufs[0]->buf_idx;
973             resultFrameNumber = mMemory.getFrameNumber(frameIndex);
974             lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
975             LOGE("Attempting to recover next frame: result Frame#: %d, resultIdx: %d, "
976                     "Lowest Frame#: %d, oldestBufIndex: %d",
977                     resultFrameNumber, frameIndex, lowestFrameNumber, oldestBufIndex);
978             if ((lowestFrameNumber != -1) && (lowestFrameNumber < resultFrameNumber)) {
979                 LOGE("Multiple frame dropped requesting cancel for frame %d, idx:%d",
980                         lowestFrameNumber, oldestBufIndex);
981                 stream->cancelBuffer(oldestBufIndex);
982                 return;
983              } else if (lowestFrameNumber == resultFrameNumber) {
984                 LOGE("Time to flush out head of list continue loop with this new super frame");
985                 itr = mOutOfSequenceBuffers.erase(itr);
986              } else {
987                 LOGE("Unexpected condition head of list is not the lowest frame number");
988                 itr = mOutOfSequenceBuffers.erase(itr);
989              }
990           }
991     } while (1);
992     return;
993 }
994 
995 /*===========================================================================
996  * FUNCTION   : putStreamBufs
997  *
998  * DESCRIPTION: release the buffers allocated to the stream
999  *
1000  * PARAMETERS : NONE
1001  *
1002  * RETURN     : NONE
1003  *==========================================================================*/
1004 void QCamera3YUVChannel::putStreamBufs()
1005 {
1006     QCamera3ProcessingChannel::putStreamBufs();
1007 
1008     // Free allocated heap buffer.
1009     mMemory.deallocate();
1010     // Clear free heap buffer list.
1011     mFreeHeapBufferList.clear();
1012     // Clear offlinePpInfoList
1013     mOfflinePpInfoList.clear();
1014 }
1015 
1016 /*===========================================================================
1017  * FUNCTION   : timeoutFrame
1018  *
1019  * DESCRIPTION: Method to indicate to the channel that a given frame has taken too
1020  *              long to be generated
1021  *
1022  * PARAMETERS : @frameNumber : frame number of the buffer that is timing out
1023  *
1024  * RETURN     : int32_t type of status
1025  *              NO_ERROR  -- success
1026  *              non-zero failure code
1027  *==========================================================================*/
1028 int32_t QCamera3ProcessingChannel::timeoutFrame(uint32_t frameNumber)
1029 {
1030     int32_t bufIdx;
1031 
1032     bufIdx = mMemory.getBufferIndex(frameNumber);
1033 
1034     if (bufIdx < 0) {
1035         LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
1036         return -1;
1037     }
1038 
1039     mStreams[0]->timeoutFrame(bufIdx);
1040     return NO_ERROR;
1041 }
1042 
1043 /*===========================================================================
1044  * FUNCTION   : postprocFail
1045  *
1046  * DESCRIPTION: notify clients about failing post-process requests.
1047  *
1048  * PARAMETERS :
1049  * @ppBuffer  : pointer to the pp buffer.
1050  *
1051  * RETURN     : 0 on success
1052  *              -EINVAL on invalid input
1053  *==========================================================================*/
1054 int32_t QCamera3ProcessingChannel::postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer) {
1055     if (ppBuffer == nullptr) {
1056         return BAD_VALUE;
1057     }
1058 
1059     if (ppBuffer->output == nullptr) {
1060         return BAD_VALUE;
1061     }
1062 
1063     camera3_stream_buffer_t result = {};
1064     result.buffer = ppBuffer->output;
1065 
1066     LOGE("Input frame number: %d dropped!", ppBuffer->frameNumber);
1067     result.stream = mCamera3Stream;
1068     result.status = CAMERA3_BUFFER_STATUS_ERROR;
1069     result.acquire_fence = -1;
1070     result.release_fence = -1;
1071     if (mChannelCB) {
1072         mChannelCB(NULL, &result, ppBuffer->frameNumber, false, mUserData);
1073     }
1074 
1075     return OK;
1076 }
1077 
1078 /*===========================================================================
1079  * FUNCTION   : request
1080  *
1081  * DESCRIPTION: handle the request - either with an input buffer or a direct
1082  *              output request
1083  *
1084  * PARAMETERS :
1085  * @buffer          : pointer to the output buffer
1086  * @frameNumber     : frame number of the request
1087  * @pInputBuffer    : pointer to input buffer if an input request
1088  * @metadata        : parameters associated with the request
1089  * @internalRequest : boolean to indicate if this is a purely internal request
1090  *                    needing internal buffer allocation
1091  * @meteringOnly    : boolean indicating a metering-only frame, a subset of internal
1092  *                    requests not consumed by the postprocessor
1093  *
1094  * RETURN     : 0 on a successful start of capture
1095  *              -EINVAL on invalid input
1096  *              -ENODEV on serious error
1097  *==========================================================================*/
1098 int32_t QCamera3ProcessingChannel::request(buffer_handle_t *buffer,
1099         uint32_t frameNumber,
1100         camera3_stream_buffer_t* pInputBuffer,
1101         metadata_buffer_t* metadata,
1102         int &indexUsed,
1103         __unused bool internalRequest = false,
1104         __unused bool meteringOnly = false)
1105 {
1106     int32_t rc = NO_ERROR;
1107     int index;
1108 
1109     if (NULL == buffer || NULL == metadata) {
1110         LOGE("Invalid buffer/metadata in channel request");
1111         return BAD_VALUE;
1112     }
1113 
1114     if (pInputBuffer) {
1115         //need to send to reprocessing
1116         LOGD("Got a request with input buffer, output streamType = %d", mStreamType);
1117         reprocess_config_t reproc_cfg;
1118         cam_dimension_t dim;
1119         memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
1120         memset(&dim, 0, sizeof(dim));
1121         setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
1122         startPostProc(reproc_cfg);
1123 
1124         qcamera_fwk_input_pp_data_t *src_frame = NULL;
1125         src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
1126                 sizeof(qcamera_fwk_input_pp_data_t));
1127         if (src_frame == NULL) {
1128             LOGE("No memory for src frame");
1129             return NO_MEMORY;
1130         }
1131         rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, buffer, frameNumber);
1132         if (NO_ERROR != rc) {
1133             LOGE("Error %d while setting framework input PP data", rc);
1134             free(src_frame);
1135             return rc;
1136         }
1137         LOGH("Post-process started");
1138         m_postprocessor.processData(src_frame);
1139     } else {
1140         index = mMemory.getMatchBufIndex((void*)buffer);
1141         if(index < 0) {
1142             rc = registerBuffer(buffer, mIsType);
1143             if (NO_ERROR != rc) {
1144                 LOGE("On-the-fly buffer registration failed %d",
1145                          rc);
1146                 return rc;
1147             }
1148 
1149             index = mMemory.getMatchBufIndex((void*)buffer);
1150             if (index < 0) {
1151                 LOGE("Could not find object among registered buffers");
1152                 return DEAD_OBJECT;
1153             }
1154         }
1155         rc = mMemory.markFrameNumber(index, frameNumber);
1156         if(rc != NO_ERROR) {
1157             LOGE("Error marking frame number:%d for index %d", frameNumber,
1158                 index);
1159             return rc;
1160         }
1161         if (m_bIsActive) {
1162             rc = mStreams[0]->bufDone(index);
1163             if(rc != NO_ERROR) {
1164                 LOGE("Failed to Q new buffer to stream");
1165                 mMemory.markFrameNumber(index, -1);
1166                 return rc;
1167             }
1168         }
1169         indexUsed = index;
1170     }
1171     return rc;
1172 }
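
/*===========================================================================
 * ILLUSTRATION : the two paths taken by request() (a minimal sketch; the
 *                variable names below are placeholders)
 *
 *   // Reprocess path: pInputBuffer != NULL, so the postprocessor is started
 *   // and fed a qcamera_fwk_input_pp_data_t built via setFwkInputPPData().
 *   // Direct path: the output buffer is registered on the fly if needed,
 *   // tagged with the frame number and queued back to the stream:
 *   //
 *   //   int idx = -1;
 *   //   rc = channel->request(outBuffer, frameNumber, NULL, metadata, idx);
 *==========================================================================*/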
1173 
1174 /*===========================================================================
1175  * FUNCTION   : initialize
1176  *
1177  * DESCRIPTION: initialize the processing channel; allocates the offline metadata buffers used for input reprocess
1178  *
1179  * PARAMETERS : isType : type of image stabilization on the buffer
1180  *
1181  * RETURN     : int32_t type of status
1182  *              NO_ERROR  -- success
1183  *              non-zero failure code
1184  *==========================================================================*/
1185 int32_t QCamera3ProcessingChannel::initialize(__unused cam_is_type_t isType)
1186 {
1187     int32_t rc = NO_ERROR;
1188     rc = mOfflineMetaMemory.allocateAll(sizeof(metadata_buffer_t));
1189     if (rc == NO_ERROR) {
1190         Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1191         mFreeOfflineMetaBuffersList.clear();
1192         for (uint32_t i = 0; i < mNumBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1);
1193                 i++) {
1194             mFreeOfflineMetaBuffersList.push_back(i);
1195         }
1196     } else {
1197         LOGE("Could not allocate offline meta buffers for input reprocess");
1198     }
1199     mOutOfSequenceBuffers.clear();
1200     return rc;
1201 }
1202 
1203 /*===========================================================================
1204  * FUNCTION   : registerBuffer
1205  *
1206  * DESCRIPTION: register streaming buffer to the channel object
1207  *
1208  * PARAMETERS :
1209  *   @buffer     : buffer to be registered
1210  *   @isType     : image stabilization type on the stream
1211  *
1212  * RETURN     : int32_t type of status
1213  *              NO_ERROR  -- success
1214  *              non-zero failure code
1215  *==========================================================================*/
1216 int32_t QCamera3ProcessingChannel::registerBuffer(buffer_handle_t *buffer,
1217         cam_is_type_t isType)
1218 {
1219     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REG_BUF);
1220     int rc = 0;
1221     mIsType = isType;
1222     cam_stream_type_t streamType;
1223 
1224     if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
1225         LOGE("Trying to register more buffers than initially requested");
1226         return BAD_VALUE;
1227     }
1228 
1229     if (0 == m_numStreams) {
1230         rc = initialize(mIsType);
1231         if (rc != NO_ERROR) {
1232             LOGE("Couldn't initialize camera stream %d", rc);
1233             return rc;
1234         }
1235     }
1236 
1237     streamType = mStreams[0]->getMyType();
1238     rc = mMemory.registerBuffer(buffer, streamType);
1239     if (ALREADY_EXISTS == rc) {
1240         return NO_ERROR;
1241     } else if (NO_ERROR != rc) {
1242         LOGE("Buffer %p couldn't be registered %d", buffer, rc);
1243         return rc;
1244     }
1245 
1246     return rc;
1247 }
1248 
1249 int32_t QCamera3ProcessingChannel::registerBufferAndGetBufDef(buffer_handle_t *buffer,
1250         mm_camera_buf_def_t *frame)
1251 {
1252     if (buffer == nullptr || frame == nullptr) {
1253         ALOGE("%s: buffer and frame cannot be nullptr.", __FUNCTION__);
1254         return BAD_VALUE;
1255     }
1256 
1257     status_t rc;
1258 
1259     // Get the buffer index.
1260     int index = mMemory.getMatchBufIndex((void*)buffer);
1261     if(index < 0) {
1262         // Register the buffer if it was not registered.
1263         rc = registerBuffer(buffer, mIsType);
1264         if (rc != OK) {
1265             ALOGE("%s: Registering buffer failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1266             return rc;
1267         }
1268 
1269         index = mMemory.getMatchBufIndex((void*)buffer);
1270         if (index < 0) {
1271             ALOGE("%s: Could not find object among registered buffers", __FUNCTION__);
1272             return DEAD_OBJECT;
1273         }
1274     }
1275 
1276     cam_frame_len_offset_t offset = {};
1277     mStreams[0]->getFrameOffset(offset);
1278 
1279     // Get the buffer def.
1280     rc = mMemory.getBufDef(offset, *frame, index, mMapStreamBuffers);
1281     if (rc != 0) {
1282         ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
1283         return rc;
1284     }
1285 
1286     // Set the frame's stream ID because it's not set in getBufDef.
1287     frame->stream_id = mStreams[0]->getMyHandle();
1288     return 0;
1289 }
1290 
1291 void QCamera3ProcessingChannel::unregisterBuffer(mm_camera_buf_def_t *frame)
1292 {
1293     if (frame == nullptr) {
1294         ALOGE("%s: frame is nullptr", __FUNCTION__);
1295         return;
1296     }
1297 
1298     mMemory.unregisterBuffer(frame->buf_idx);
1299 }
1300 
1301 /*===========================================================================
1302  * FUNCTION   : setFwkInputPPData
1303  *
1304  * DESCRIPTION: fill out the framework src frame information for reprocessing
1305  *
1306  * PARAMETERS :
1307  *   @src_frame         : input pp data to be filled out
1308  *   @pInputBuffer      : input buffer for reprocessing
1309  *   @reproc_cfg        : pointer to the reprocess config
1310  *   @metadata          : pointer to the metadata buffer
1311  *   @output_buffer     : output buffer for reprocessing; could be NULL if not
1312  *                        framework allocated
1313  *   @frameNumber       : frame number of the request
1314  *
1315  * RETURN     : int32_t type of status
1316  *              NO_ERROR  -- success
1317  *              non-zero failure code
1318  *==========================================================================*/
1319 int32_t QCamera3ProcessingChannel::setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
1320         camera3_stream_buffer_t *pInputBuffer, reprocess_config_t *reproc_cfg,
1321         metadata_buffer_t *metadata, buffer_handle_t *output_buffer,
1322         uint32_t frameNumber)
1323 {
1324     int32_t rc = NO_ERROR;
1325     int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1326     if(input_index < 0) {
1327         rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
1328         if (NO_ERROR != rc) {
1329             LOGE("On-the-fly input buffer registration failed %d",
1330                      rc);
1331             return rc;
1332         }
1333         input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1334         if (input_index < 0) {
1335             LOGE("Could not find object among registered buffers");
1336             return DEAD_OBJECT;
1337         }
1338     }
1339     mOfflineMemory.markFrameNumber(input_index, frameNumber);
1340 
1341     src_frame->src_frame = *pInputBuffer;
1342     rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info,
1343             src_frame->input_buffer, input_index, mMapStreamBuffers);
1344     if (rc != 0) {
1345         return rc;
1346     }
1347     dumpYUV(&src_frame->input_buffer, reproc_cfg->input_stream_dim,
1348             reproc_cfg->input_stream_plane_info.plane_info, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
1349     cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
1350     cam_stream_buf_plane_info_t meta_planes;
1351     rc = mm_stream_calc_offset_metadata(&dim, &mPaddingInfo, &meta_planes);
1352     if (rc != 0) {
1353         LOGE("Metadata stream plane info calculation failed!");
1354         return rc;
1355     }
1356     uint32_t metaBufIdx;
1357     {
1358         Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1359         if (mFreeOfflineMetaBuffersList.empty()) {
1360             LOGE("mFreeOfflineMetaBuffersList is null. Fatal");
1361             return BAD_VALUE;
1362         }
1363 
1364         metaBufIdx = *(mFreeOfflineMetaBuffersList.begin());
1365         mFreeOfflineMetaBuffersList.erase(mFreeOfflineMetaBuffersList.begin());
1366         LOGD("erasing %d, mFreeOfflineMetaBuffersList.size %d", metaBufIdx,
1367                 mFreeOfflineMetaBuffersList.size());
1368     }
1369 
1370     mOfflineMetaMemory.markFrameNumber(metaBufIdx, frameNumber);
1371 
1372     mm_camera_buf_def_t meta_buf;
1373     cam_frame_len_offset_t offset = meta_planes.plane_info;
1374     rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx, true /*virtualAddr*/);
1375     if (NO_ERROR != rc) {
1376         return rc;
1377     }
1378     memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
1379     src_frame->metadata_buffer = meta_buf;
1380     src_frame->reproc_config = *reproc_cfg;
1381     src_frame->output_buffer = output_buffer;
1382     src_frame->frameNumber = frameNumber;
1383     return rc;
1384 }
1385 
1386 /*===========================================================================
1387  * FUNCTION   : checkStreamCbErrors
1388  *
1389  * DESCRIPTION: check the stream callback for errors
1390  *
1391  * PARAMETERS :
1392  *   @super_frame : the super frame with filled buffer
1393  *   @stream      : stream on which the buffer was requested and filled
1394  *
1395  * RETURN     : int32_t type of status
1396  *              NO_ERROR  -- success
1397  *              non-zero failure code
1398  *==========================================================================*/
1399 int32_t QCamera3ProcessingChannel::checkStreamCbErrors(mm_camera_super_buf_t *super_frame,
1400         QCamera3Stream *stream)
1401 {
1402     if (NULL == stream) {
1403         LOGE("Invalid stream");
1404         return BAD_VALUE;
1405     }
1406 
1407     if(NULL == super_frame) {
1408          LOGE("Invalid Super buffer");
1409          return BAD_VALUE;
1410     }
1411 
1412     if(super_frame->num_bufs != 1) {
1413          LOGE("Multiple streams are not supported");
1414          return BAD_VALUE;
1415     }
1416     if(NULL == super_frame->bufs[0]) {
1417          LOGE("Error, Super buffer frame does not contain valid buffer");
1418          return BAD_VALUE;
1419     }
1420     return NO_ERROR;
1421 }
1422 
1423 /*===========================================================================
1424  * FUNCTION   : getStreamSize
1425  *
1426  * DESCRIPTION: get the size from the camera3_stream_t for the channel
1427  *
1428  * PARAMETERS :
1429  *   @dim     : Return the size of the stream
1430  *
1431  * RETURN     : int32_t type of status
1432  *              NO_ERROR  -- success
1433  *              non-zero failure code
1434  *==========================================================================*/
1435 int32_t QCamera3ProcessingChannel::getStreamSize(cam_dimension_t &dim)
1436 {
1437     if (mCamera3Stream) {
1438         dim.width = mCamera3Stream->width;
1439         dim.height = mCamera3Stream->height;
1440         return NO_ERROR;
1441     } else {
1442         return BAD_VALUE;
1443     }
1444 }
1445 
1446 /*===========================================================================
1447  * FUNCTION   : getStreamBufs
1448  *
1449  * DESCRIPTION: get the buffers allocated to the stream
1450  *
1451  * PARAMETERS :
1452  * @len       : buffer length
1453  *
1454  * RETURN     : int32_t type of status
1455  *              NO_ERROR  -- success
1456  *              non-zero failure code
1457  *==========================================================================*/
1458 QCamera3StreamMem* QCamera3ProcessingChannel::getStreamBufs(uint32_t /*len*/)
1459 {
1460     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GETSTREAMBUFS);
1461     return &mMemory;
1462 }
1463 
1464 /*===========================================================================
1465  * FUNCTION   : putStreamBufs
1466  *
1467  * DESCRIPTION: release the buffers allocated to the stream
1468  *
1469  * PARAMETERS : NONE
1470  *
1471  * RETURN     : NONE
1472  *==========================================================================*/
1473 void QCamera3ProcessingChannel::putStreamBufs()
1474 {
1475     mMemory.unregisterBuffers();
1476 
1477     /* Reclaim all the offline meta buffers and push them to the free list */
1478     {
1479         Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1480         mFreeOfflineMetaBuffersList.clear();
1481         for (uint32_t i = 0; i < mOfflineMetaMemory.getCnt(); i++) {
1482             mFreeOfflineMetaBuffersList.push_back(i);
1483         }
1484     }
1485 }
1486 
1487 
1488 /*===========================================================================
1489  * FUNCTION   : stop
1490  *
1491  * DESCRIPTION: stop processing channel, which will stop all streams within,
1492  *              including the reprocessing channel in postprocessor.
1493  *
1494  * PARAMETERS : none
1495  *
1496  * RETURN     : int32_t type of status
1497  *              NO_ERROR  -- success
1498  *              non-zero failure code
1499  *==========================================================================*/
1500 int32_t QCamera3ProcessingChannel::stop()
1501 {
1502     if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
1503         KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
1504     }
1505     int32_t rc = NO_ERROR;
1506     if(!m_bIsActive) {
1507         LOGE("Attempt to stop inactive channel");
1508         return rc;
1509     }
1510 
1511     m_postprocessor.stop();
1512     mPostProcStarted = false;
1513     rc |= QCamera3Channel::stop();
1514     return rc;
1515 }
1516 
1517 /*===========================================================================
1518  * FUNCTION   : startPostProc
1519  *
1520  * DESCRIPTION: figure out whether the postprocessor needs to be (re)started
1521  *              and, if so, start it
1522  *
1523  * PARAMETERS :
1524  * @config         : reprocessing configuration
1527  *
1528  * RETURN     : NONE
1529  *==========================================================================*/
1530 void QCamera3ProcessingChannel::startPostProc(const reprocess_config_t &config)
1531 {
1532     if (mPostProcStarted) {
1533         if (config.reprocess_type != mReprocessType) {
1534             // If the reprocess type doesn't match, stop and start with the new type
1535             m_postprocessor.stop();
1536             mPostProcStarted = false;
1537         } else {
1538             // Return if reprocess type is the same.
1539             return;
1540         }
1541     }
1542 
1543     m_postprocessor.start(config);
1544     mPostProcStarted = true;
1545     mReprocessType = config.reprocess_type;
1546 }
1547 
1548 /*===========================================================================
1549  * FUNCTION   : queueReprocMetadata
1550  *
1551  * DESCRIPTION: queue the reprocess metadata to the postprocessor
1552  *
1553  * PARAMETERS : metadata : the metadata corresponding to the pp frame
1554  *
1555  * RETURN     : int32_t type of status
1556  *              NO_ERROR  -- success
1557  *              non-zero failure code
1558  *==========================================================================*/
1559 int32_t QCamera3ProcessingChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
1560 {
1561     return m_postprocessor.processPPMetadata(metadata);
1562 }
1563 
1564 /*===========================================================================
1565  * FUNCTION : metadataBufDone
1566  *
1567  * DESCRIPTION: Buffer done method for a metadata buffer
1568  *
1569  * PARAMETERS :
1570  * @recvd_frame : received metadata frame
1571  *
1572  * RETURN     : int32_t type of status
1573  *              NO_ERROR  -- success
1574  *              non-zero failure code
1575  *==========================================================================*/
1576 int32_t QCamera3ProcessingChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
1577 {
1578     int32_t rc = NO_ERROR;
1579     if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
1580         LOGE("Metadata channel or metadata buffer invalid");
1581         return BAD_VALUE;
1582     }
1583 
1584     rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
1585 
1586     return rc;
1587 }
1588 
1589 /*===========================================================================
1590  * FUNCTION : translateStreamTypeAndFormat
1591  *
1592  * DESCRIPTION: translates the framework stream format into HAL stream type
1593  *              and format
1594  *
1595  * PARAMETERS :
1596  * @streamType   : translated stream type
1597  * @streamFormat : translated stream format
1598  * @stream       : fwk stream
1599  *
1600  * RETURN     : int32_t type of status
1601  *              NO_ERROR  -- success
1602  *              non-zero failure code
1603  *==========================================================================*/
1604 int32_t QCamera3ProcessingChannel::translateStreamTypeAndFormat(camera3_stream_t *stream,
1605         cam_stream_type_t &streamType, cam_format_t &streamFormat)
1606 {
1607     switch (stream->format) {
1608         case HAL_PIXEL_FORMAT_YCbCr_420_888:
1609             if(stream->stream_type == CAMERA3_STREAM_INPUT){
1610                 streamType = CAM_STREAM_TYPE_SNAPSHOT;
1611                 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
1612                         stream->width, stream->height, m_bUBWCenable, mIsType);
1613             } else {
1614                 streamType = CAM_STREAM_TYPE_CALLBACK;
1615                 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK,
1616                         stream->width, stream->height, m_bUBWCenable, mIsType);
1617             }
1618             break;
1619         case HAL_PIXEL_FORMAT_Y8:
1620             streamType = CAM_STREAM_TYPE_CALLBACK;
1621             streamFormat = CAM_FORMAT_Y_ONLY;
1622             break;
1623         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1624             if (stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
1625                 streamType = CAM_STREAM_TYPE_VIDEO;
1626                 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_VIDEO,
1627                         stream->width, stream->height, m_bUBWCenable, mIsType);
1628             } else if(stream->stream_type == CAMERA3_STREAM_INPUT ||
1629                     stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1630                     IS_USAGE_ZSL(stream->usage)){
1631                 streamType = CAM_STREAM_TYPE_SNAPSHOT;
1632                 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
1633                         stream->width, stream->height, m_bUBWCenable, mIsType);
1634             } else {
1635                 streamType = CAM_STREAM_TYPE_PREVIEW;
1636                 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
1637                         stream->width, stream->height, m_bUBWCenable, mIsType);
1638             }
1639             break;
1640         case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1641         case HAL_PIXEL_FORMAT_RAW16:
1642         case HAL_PIXEL_FORMAT_RAW10:
1643             streamType = CAM_STREAM_TYPE_RAW;
1644             if ((HAL_DATASPACE_DEPTH == stream->data_space) &&
1645                     (HAL_PIXEL_FORMAT_RAW16 == stream->format)) {
1646                 streamFormat = CAM_FORMAT_META_RAW_10BIT;
1647             } else {
1648                 streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
1649             }
1650             break;
1651         default:
1652             return -EINVAL;
1653     }
1654     LOGD("fwk_format = %d, streamType = %d, streamFormat = %d",
1655             stream->format, streamType, streamFormat);
1656     return NO_ERROR;
1657 }
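
/*
 * Illustrative examples of the mapping above (not exhaustive):
 *   - HAL_PIXEL_FORMAT_YCbCr_420_888 on a CAMERA3_STREAM_INPUT stream maps to
 *     CAM_STREAM_TYPE_SNAPSHOT with the default snapshot format.
 *   - HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED with
 *     GRALLOC_USAGE_HW_VIDEO_ENCODER usage maps to CAM_STREAM_TYPE_VIDEO.
 *   - HAL_PIXEL_FORMAT_RAW16 with HAL_DATASPACE_DEPTH maps to
 *     CAM_STREAM_TYPE_RAW with CAM_FORMAT_META_RAW_10BIT.
 */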
1658 
1659 /*===========================================================================
1660  * FUNCTION : setReprocConfig
1661  *
1662  * DESCRIPTION: sets the reprocessing parameters for the input buffer
1663  *
1664  * PARAMETERS :
1665  * @reproc_cfg : the configuration to be set
1666  * @pInputBuffer : pointer to the input buffer
1667  * @metadata : pointer to the reprocessing metadata buffer
1668  * @streamFormat : format of the input stream
1669  *
1670  * RETURN     : int32_t type of status
1671  *              NO_ERROR  -- success
1672  *              non-zero failure code
1673  *==========================================================================*/
1674 int32_t QCamera3ProcessingChannel::setReprocConfig(reprocess_config_t &reproc_cfg,
1675         camera3_stream_buffer_t *pInputBuffer,
1676         __unused metadata_buffer_t *metadata,
1677         cam_format_t streamFormat, cam_dimension_t dim)
1678 {
1679     int32_t rc = 0;
1680     reproc_cfg.padding = &mPaddingInfo;
1681     cam_stream_info_t info = {.fmt = reproc_cfg.stream_format};
1682     //To ensure a big enough buffer size, set both the height and width
1683     //padding to max(height padding, width padding)
1684     if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
1685        reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
1686     } else {
1687        reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
1688     }
1689     if (NULL != pInputBuffer) {
1690         reproc_cfg.input_stream_dim.width = (int32_t)pInputBuffer->stream->width;
1691         reproc_cfg.input_stream_dim.height = (int32_t)pInputBuffer->stream->height;
1692     } else {
1693         reproc_cfg.input_stream_dim.width = (int32_t)dim.width;
1694         reproc_cfg.input_stream_dim.height = (int32_t)dim.height;
1695     }
1696     reproc_cfg.src_channel = this;
1697     reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
1698     reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
1699     reproc_cfg.reprocess_type = getReprocessType();
1700     reproc_cfg.stream_format = streamFormat;
1701 
1702     //offset calculation
1703     if (NULL != pInputBuffer) {
1704         rc = translateStreamTypeAndFormat(pInputBuffer->stream,
1705                 reproc_cfg.stream_type, reproc_cfg.input_stream_format);
1706         if (rc != NO_ERROR) {
1707             LOGE("Stream format %d is not supported",
1708                     pInputBuffer->stream->format);
1709             return rc;
1710         }
1711     } else {
1712         reproc_cfg.stream_type = mStreamType;
1713         reproc_cfg.input_stream_format = streamFormat;
1714     }
1715 
1716     switch (reproc_cfg.stream_type) {
1717         case CAM_STREAM_TYPE_PREVIEW:
1718             if (getStreamByIndex(0) == NULL) {
1719                 LOGE("Could not find stream");
1720                 rc = -1;
1721                 break;
1722             }
1723             rc = mm_stream_calc_offset_preview(
1724                     getStreamByIndex(0)->getStreamInfo(),
1725                     &reproc_cfg.input_stream_dim,
1726                     reproc_cfg.padding,
1727                     &reproc_cfg.input_stream_plane_info);
1728             break;
1729         case CAM_STREAM_TYPE_VIDEO:
1730             rc = mm_stream_calc_offset_video(reproc_cfg.stream_format,
1731                     &reproc_cfg.input_stream_dim,
1732                     &reproc_cfg.input_stream_plane_info);
1733             break;
1734         case CAM_STREAM_TYPE_RAW:
1735             rc = mm_stream_calc_offset_raw(&info,
1736                     &reproc_cfg.input_stream_dim,
1737                     reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1738             break;
1739         case CAM_STREAM_TYPE_SNAPSHOT:
1740         case CAM_STREAM_TYPE_CALLBACK:
1741         default:
1742             rc = mm_stream_calc_offset_snapshot(streamFormat, &reproc_cfg.input_stream_dim,
1743                     reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1744             break;
1745     }
1746     if (rc != 0) {
1747         LOGE("Stream %d plane info calculation failed!", mStreamType);
1748         return rc;
1749     }
1750 
1751     IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
1752         reproc_cfg.hdr_param = *hdr_info;
1753     }
1754 
1755     return rc;
1756 }
1757 
1758 /*===========================================================================
1759  * FUNCTION   : reprocessCbRoutine
1760  *
1761  * DESCRIPTION: callback function for the reprocessed frame. This frame now
1762  *              should be returned to the framework
1763  *
1764  * PARAMETERS :
1765  * @resultBuffer      : buffer containing the reprocessed data
1766  * @resultFrameNumber : frame number on which the buffer was requested
1767  *
1768  * RETURN     : NONE
1769  *
1770  *==========================================================================*/
1771 void QCamera3ProcessingChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
1772         uint32_t resultFrameNumber)
1773 {
1774     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REPROC_CB);
1775     int rc = NO_ERROR;
1776 
1777     rc = releaseOfflineMemory(resultFrameNumber);
1778     if (NO_ERROR != rc) {
1779         LOGE("Error releasing offline memory %d", rc);
1780     }
1781     /* Since reprocessing is done, send the callback to release the input buffer */
1782     if (mChannelCB) {
1783         mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
1784     }
1785     issueChannelCb(resultBuffer, resultFrameNumber);
1786 
1787     return;
1788 }
1789 
1790 /*===========================================================================
1791  * FUNCTION   : issueChannelCb
1792  *
1793  * DESCRIPTION: function to set the result and issue channel callback
1794  *
1795  * PARAMETERS :
1796  * @resultBuffer      : buffer containing the data
1797  * @resultFrameNumber : frame number on which the buffer was requested
1798  *
1799  * RETURN     : NONE
1800  *
1801  *
1802  *==========================================================================*/
1803 void QCamera3ProcessingChannel::issueChannelCb(buffer_handle_t *resultBuffer,
1804         uint32_t resultFrameNumber)
1805 {
1806     camera3_stream_buffer_t result;
1807     //Use below data to issue framework callback
1808     result.stream = mCamera3Stream;
1809     result.buffer = resultBuffer;
1810     result.status = CAMERA3_BUFFER_STATUS_OK;
1811     result.acquire_fence = -1;
1812     result.release_fence = -1;
1813 
1814     if (mChannelCB) {
1815         mChannelCB(NULL, &result, resultFrameNumber, false, mUserData);
1816     }
1817 }
1818 
1819 /*===========================================================================
1820  * FUNCTION   : showDebugFPS
1821  *
1822  * DESCRIPTION: Function to log the fps for preview, video, callback and raw
1823  *              streams
1824  *
1825  * PARAMETERS : Stream type
1826  *
1827  * RETURN  : None
1828  *==========================================================================*/
1829 void QCamera3ProcessingChannel::showDebugFPS(int32_t streamType)
1830 {
1831     double fps = 0;
1832     mFrameCount++;
1833     nsecs_t now = systemTime();
1834     nsecs_t diff = now - mLastFpsTime;
1835     if (diff > ms2ns(250)) {
1836         fps = (((double)(mFrameCount - mLastFrameCount)) *
1837                 (double)(s2ns(1))) / (double)diff;
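        // For example (illustrative numbers): 15 new frames over a 500 ms
        // window yields fps = 15 * 1e9 / 5e8 = 30.0.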
1838         switch(streamType) {
1839             case CAM_STREAM_TYPE_PREVIEW:
1840                 LOGH("PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f: mFrameCount=%d",
1841                          fps, mFrameCount);
1842                 break;
1843             case CAM_STREAM_TYPE_VIDEO:
1844                 LOGH("PROFILE_VIDEO_FRAMES_PER_SECOND : %.4f",
1845                          fps);
1846                 break;
1847             case CAM_STREAM_TYPE_CALLBACK:
1848                 LOGH("PROFILE_CALLBACK_FRAMES_PER_SECOND : %.4f",
1849                          fps);
1850                 break;
1851             case CAM_STREAM_TYPE_RAW:
1852                 LOGH("PROFILE_RAW_FRAMES_PER_SECOND : %.4f",
1853                          fps);
1854                 break;
1855             default:
1856                 LOGH("logging not supported for the stream");
1857                 break;
1858         }
1859         mLastFpsTime = now;
1860         mLastFrameCount = mFrameCount;
1861     }
1862 }
1863 
1864 /*===========================================================================
1865  * FUNCTION   : releaseOfflineMemory
1866  *
1867  * DESCRIPTION: function to clean up the offline memory used for input reprocess
1868  *
1869  * PARAMETERS :
1870  * @resultFrameNumber : frame number on which the buffer was requested
1871  *
1872  * RETURN     : int32_t type of status
1873  *              NO_ERROR  -- success
1874  *              non-zero failure code
1875  *
1876  *
1877  *==========================================================================*/
1878 int32_t QCamera3ProcessingChannel::releaseOfflineMemory(uint32_t resultFrameNumber)
1879 {
1880     int32_t rc = NO_ERROR;
1881     int32_t inputBufIndex =
1882             mOfflineMemory.getGrallocBufferIndex(resultFrameNumber);
1883     if (0 <= inputBufIndex) {
1884         rc = mOfflineMemory.unregisterBuffer(inputBufIndex);
1885     } else {
1886         LOGW("Could not find offline input buffer, resultFrameNumber %d",
1887                  resultFrameNumber);
1888     }
1889     if (rc != NO_ERROR) {
1890         LOGE("Failed to unregister offline input buffer");
1891     }
1892 
1893     int32_t metaBufIndex =
1894             mOfflineMetaMemory.getHeapBufferIndex(resultFrameNumber);
1895     if (0 <= metaBufIndex) {
1896         Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1897         mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
1898     } else {
1899         LOGW("Could not find offline meta buffer, resultFrameNumber %d",
1900                 resultFrameNumber);
1901     }
1902 
1903     return rc;
1904 }
1905 
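/*===========================================================================
 * Illustrative usage sketch (not part of the HAL code): the offline reprocess
 * helpers above are typically exercised in roughly the following order when a
 * framework input buffer is submitted for reprocessing. Error handling,
 * locking and the surrounding request bookkeeping are omitted, and the local
 * variable names are hypothetical:
 *
 *     reprocess_config_t reproc_cfg;
 *     memset(&reproc_cfg, 0, sizeof(reproc_cfg));
 *     setReprocConfig(reproc_cfg, pInputBuffer, metadata, streamFormat, dim);
 *     startPostProc(reproc_cfg);
 *
 *     qcamera_fwk_input_pp_data_t *src_frame =
 *             (qcamera_fwk_input_pp_data_t *)calloc(1, sizeof(*src_frame));
 *     setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata,
 *             output_buffer, frameNumber);
 *
 *     // Once the postprocessor finishes, reprocessCbRoutine() releases the
 *     // offline input/meta buffers via releaseOfflineMemory() and returns the
 *     // result to the framework through issueChannelCb().
 *==========================================================================*/
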
1906 /* Regular Channel methods */
1907 /*===========================================================================
1908  * FUNCTION   : QCamera3RegularChannel
1909  *
1910  * DESCRIPTION: constructor of QCamera3RegularChannel
1911  *
1912  * PARAMETERS :
1913  *   @cam_handle : camera handle
1914  *   @cam_ops    : ptr to camera ops table
1915  *   @cb_routine : callback routine to frame aggregator
1916  *   @stream     : camera3_stream_t structure
1917  *   @stream_type: Channel stream type
1918  *   @postprocess_mask: feature mask for postprocessing
1919  *   @metadataChannel : metadata channel for the session
1920  *   @numBuffers : number of max dequeued buffers
1921  *
1922  * RETURN     : none
1923  *==========================================================================*/
1924 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
1925         uint32_t channel_handle,
1926         mm_camera_ops_t *cam_ops,
1927         channel_cb_routine cb_routine,
1928         channel_cb_buffer_err cb_buffer_err,
1929         cam_padding_info_t *paddingInfo,
1930         void *userData,
1931         camera3_stream_t *stream,
1932         cam_stream_type_t stream_type,
1933         cam_feature_mask_t postprocess_mask,
1934         QCamera3Channel *metadataChannel,
1935         uint32_t numBuffers) :
1936             QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
1937                     cb_routine, cb_buffer_err, paddingInfo, userData, stream, stream_type,
1938                     postprocess_mask, metadataChannel, numBuffers),
1939             mBatchSize(0),
1940             mRotation(ROTATE_0)
1941 {
1942 }
1943 
1944 /*===========================================================================
1945  * FUNCTION   : ~QCamera3RegularChannel
1946  *
1947  * DESCRIPTION: destructor of QCamera3RegularChannel
1948  *
1949  * PARAMETERS : none
1950  *
1951  * RETURN     : none
1952  *==========================================================================*/
1953 QCamera3RegularChannel::~QCamera3RegularChannel()
1954 {
1955     destroy();
1956 }
1957 
1958 /*===========================================================================
1959  * FUNCTION   : initialize
1960  *
1961  * DESCRIPTION: Initialize and add camera channel & stream
1962  *
1963  * PARAMETERS :
1964  *    @isType : type of image stabilization required on this stream
1965  *
1966  * RETURN     : int32_t type of status
1967  *              NO_ERROR  -- success
1968  *              non-zero failure code
1969  *==========================================================================*/
1970 
1971 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
1972 {
1973     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_INIT);
1974     int32_t rc = NO_ERROR;
1975 
1976     cam_dimension_t streamDim;
1977 
1978     if (NULL == mCamera3Stream) {
1979         LOGE("Camera stream uninitialized");
1980         return NO_INIT;
1981     }
1982 
1983     if (1 <= m_numStreams) {
1984         // Only one stream per channel supported in v3 Hal
1985         return NO_ERROR;
1986     }
1987 
1988     mIsType  = isType;
1989 
1990     rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
1991             mStreamFormat);
1992     if (rc != NO_ERROR) {
1993         return -EINVAL;
1994     }
1995 
1996 
1997     if ((mStreamType == CAM_STREAM_TYPE_VIDEO) ||
1998             (mStreamType == CAM_STREAM_TYPE_PREVIEW)) {
1999         if ((mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) &&
2000                 ((mPostProcMask & CAM_QCOM_FEATURE_ROTATION) == 0)) {
2001             LOGE("attempting rotation %d when rotation is disabled",
2002                     mCamera3Stream->rotation);
2003             return -EINVAL;
2004         }
2005 
2006         switch (mCamera3Stream->rotation) {
2007             case CAMERA3_STREAM_ROTATION_0:
2008                 mRotation = ROTATE_0;
2009                 break;
2010             case CAMERA3_STREAM_ROTATION_90: {
2011                 mRotation = ROTATE_90;
2012                 break;
2013             }
2014             case CAMERA3_STREAM_ROTATION_180:
2015                 mRotation = ROTATE_180;
2016                 break;
2017             case CAMERA3_STREAM_ROTATION_270: {
2018                 mRotation = ROTATE_270;
2019                 break;
2020             }
2021             default:
2022                 LOGE("Unknown rotation: %d",
2023                          mCamera3Stream->rotation);
2024             return -EINVAL;
2025         }
2026 
2027         // The Camera3/HAL3 spec expects counter-clockwise rotation, but the
2028         // CPP HW rotates clockwise, so swap 90 and 270 here.
2029         if (mRotation == ROTATE_90) {
2030             mRotation = ROTATE_270;
2031         } else if (mRotation == ROTATE_270) {
2032             mRotation = ROTATE_90;
2033         }
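        // Example (illustrative): a stream configured with
        // CAMERA3_STREAM_ROTATION_90 (counter-clockwise) therefore ends up
        // programmed as ROTATE_270 for the clockwise-rotating CPP.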
2034 
2035     } else if (mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) {
2036         LOGE("Rotation %d is not supported by stream type %d",
2037                 mCamera3Stream->rotation,
2038                 mStreamType);
2039         return -EINVAL;
2040     }
2041 
2042     streamDim.width = mCamera3Stream->width;
2043     streamDim.height = mCamera3Stream->height;
2044 
2045     LOGD("batch size is %d", mBatchSize);
2046     rc = QCamera3Channel::addStream(mStreamType,
2047             mStreamFormat,
2048             streamDim,
2049             mRotation,
2050             mNumBufs,
2051             mPostProcMask,
2052             mIsType,
2053             mBatchSize);
2054 
2055     return rc;
2056 }
2057 
2058 /*===========================================================================
2059  * FUNCTION   : setBatchSize
2060  *
2061  * DESCRIPTION: Set batch size for the channel.
2062  *
2063  * PARAMETERS :
2064  *   @batchSize  : Number of image buffers in a batch
2065  *
2066  * RETURN     : int32_t type of status
2067  *              NO_ERROR  -- success always
2068  *              non-zero failure code
2069  *==========================================================================*/
2070 int32_t QCamera3RegularChannel::setBatchSize(uint32_t batchSize)
2071 {
2072     int32_t rc = NO_ERROR;
2073 
2074     mBatchSize = batchSize;
2075     LOGD("Batch size set: %d", mBatchSize);
2076     return rc;
2077 }
2078 
2079 /*===========================================================================
2080  * FUNCTION   : getStreamTypeMask
2081  *
2082  * DESCRIPTION: Get bit mask of all stream types in this channel.
2083  *              If stream is not initialized, then generate mask based on
2084  *              local streamType
2085  *
2086  * PARAMETERS : None
2087  *
2088  * RETURN     : Bit mask of all stream types in this channel
2089  *==========================================================================*/
2090 uint32_t QCamera3RegularChannel::getStreamTypeMask()
2091 {
2092     if (mStreams[0]) {
2093         return QCamera3Channel::getStreamTypeMask();
2094     } else {
2095         return (1U << mStreamType);
2096     }
2097 }
2098 
2099 /*===========================================================================
2100  * FUNCTION   : queueBatchBuf
2101  *
2102  * DESCRIPTION: queue batch container to downstream
2103  *
2104  * PARAMETERS :
2105  *
2106  * RETURN     : int32_t type of status
2107  *              NO_ERROR  -- success always
2108  *              non-zero failure code
2109  *==========================================================================*/
2110 int32_t QCamera3RegularChannel::queueBatchBuf()
2111 {
2112     int32_t rc = NO_ERROR;
2113 
2114     if (mStreams[0]) {
2115         rc = mStreams[0]->queueBatchBuf();
2116     }
2117     if (rc != NO_ERROR) {
2118         LOGE("stream->queueBatchContainer failed");
2119     }
2120     return rc;
2121 }
2122 
2123 /*===========================================================================
2124  * FUNCTION   : request
2125  *
2126  * DESCRIPTION: process a request from the camera service. Stream on if necessary.
2127  *
2128  * PARAMETERS :
2129  *   @buffer  : buffer to be filled for this request
2130  *
2131  * RETURN     : 0 on a success start of capture
2132  *              -EINVAL on invalid input
2133  *              -ENODEV on serious error
2134  *==========================================================================*/
2135 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber, int &indexUsed)
2136 {
2137     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_REQ);
2138     //FIX ME: Return buffer back in case of failures below.
2139 
2140     int32_t rc = NO_ERROR;
2141     int index;
2142 
2143     if (NULL == buffer) {
2144         LOGE("Invalid buffer in channel request");
2145         return BAD_VALUE;
2146     }
2147 
2148     index = mMemory.getMatchBufIndex((void*)buffer);
2149     if(index < 0) {
2150         rc = registerBuffer(buffer, mIsType);
2151         if (NO_ERROR != rc) {
2152             LOGE("On-the-fly buffer registration failed %d",
2153                      rc);
2154             return rc;
2155         }
2156 
2157         index = mMemory.getMatchBufIndex((void*)buffer);
2158         if (index < 0) {
2159             LOGE("Could not find object among registered buffers");
2160             return DEAD_OBJECT;
2161         }
2162     }
2163 
2164     rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
2165     if(rc != NO_ERROR) {
2166         LOGE("Failed to mark FrameNumber:%d,idx:%d",frameNumber,index);
2167         return rc;
2168     }
2169     if (m_bIsActive) {
2170         rc = mStreams[0]->bufDone((uint32_t)index);
2171         if(rc != NO_ERROR) {
2172             LOGE("Failed to Q new buffer to stream");
2173             mMemory.markFrameNumber(index, -1);
2174             return rc;
2175         }
2176     }
2177 
2178     indexUsed = index;
2179     return rc;
2180 }
2181 
2182 /*===========================================================================
2183  * FUNCTION   : getReprocessType
2184  *
2185  * DESCRIPTION: get the type of reprocess output supported by this channel
2186  *
2187  * PARAMETERS : NONE
2188  *
2189  * RETURN     : reprocess_type_t : type of reprocess
2190  *==========================================================================*/
2191 reprocess_type_t QCamera3RegularChannel::getReprocessType()
2192 {
2193     return REPROCESS_TYPE_PRIVATE;
2194 }
2195 
2196 
2197 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
2198                     uint32_t channel_handle,
2199                     mm_camera_ops_t *cam_ops,
2200                     channel_cb_routine cb_routine,
2201                     channel_cb_buffer_err cb_buffer_err,
2202                     cam_padding_info_t *paddingInfo,
2203                     cam_feature_mask_t postprocess_mask,
2204                     void *userData, uint32_t numBuffers) :
2205                         QCamera3Channel(cam_handle, channel_handle, cam_ops,
2206                                 cb_routine, cb_buffer_err, paddingInfo, postprocess_mask,
2207                                 userData, numBuffers),
2208                         mMemory(NULL), mDepthDataPresent(false)
2209 {
2210     mMapStreamBuffers = true;
2211 }
2212 
2213 QCamera3MetadataChannel::~QCamera3MetadataChannel()
2214 {
2215     destroy();
2216 
2217     if (mMemory) {
2218         mMemory->deallocate();
2219         delete mMemory;
2220         mMemory = NULL;
2221     }
2222 }
2223 
2224 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
2225 {
2226     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_METADATA_CH_INIT);
2227     int32_t rc;
2228     cam_dimension_t streamDim;
2229 
2230     if (mMemory || m_numStreams > 0) {
2231         LOGE("metadata channel already initialized");
2232         return -EINVAL;
2233     }
2234 
2235     streamDim.width = (int32_t)sizeof(metadata_buffer_t);
2236     streamDim.height = 1;
2237 
2238     mIsType = isType;
2239     rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
2240             streamDim, ROTATE_0, (uint8_t)mNumBuffers, mPostProcMask, mIsType);
2241     if (rc < 0) {
2242         LOGE("addStream failed");
2243     }
2244     return rc;
2245 }
2246 
2247 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
2248                                                 uint32_t /*frameNumber*/,
2249                                                 int&  /*indexUsed*/)
2250 {
2251     if (!m_bIsActive) {
2252         return start();
2253     }
2254     else
2255         return 0;
2256 }
2257 
2258 void QCamera3MetadataChannel::streamCbRoutine(
2259                         mm_camera_super_buf_t *super_frame,
2260                         QCamera3Stream * /*stream*/)
2261 {
2262     ATRACE_NAME("metadata_stream_cb_routine");
2263     uint32_t requestNumber = 0;
2264     if (super_frame == NULL || super_frame->num_bufs != 1) {
2265         LOGE("super_frame is not valid");
2266         return;
2267     }
2268     if (mChannelCB) {
2269         mChannelCB(super_frame, NULL, requestNumber, false, mUserData);
2270     }
2271 }
2272 
2273 QCamera3StreamMem* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
2274 {
2275     int rc;
2276     if (len < sizeof(metadata_buffer_t)) {
2277         LOGE("Metadata buffer size less than structure %d vs %d",
2278                 len,
2279                 sizeof(metadata_buffer_t));
2280         return NULL;
2281     }
2282     mMemory = new QCamera3StreamMem(MIN_STREAMING_BUFFER_NUM);
2283     if (!mMemory) {
2284         LOGE("unable to create metadata memory");
2285         return NULL;
2286     }
2287     rc = mMemory->allocateAll(len);
2288     if (rc < 0) {
2289         LOGE("unable to allocate metadata memory");
2290         delete mMemory;
2291         mMemory = NULL;
2292         return NULL;
2293     }
2294     clear_metadata_buffer((metadata_buffer_t*)mMemory->getPtr(0));
2295 
2296     for (uint32_t i = 0; i < mMemory->getCnt(); i++) {
2297         if (mMemory->valid(i)) {
2298             metadata_buffer_t *meta =
2299                     static_cast<metadata_buffer_t *>(mMemory->getPtr(i));
2300             meta->depth_data.depth_data = nullptr;
2301             if (mDepthDataPresent) {
2302                 meta->depth_data.depth_data =
2303                         new uint8_t[PD_DATA_SIZE];
2304             }
2305         } else {
2306             LOGE("Invalid meta buffer at index: %d", i);
2307         }
2308     }
2309 
2310     return mMemory;
2311 }
2312 
2313 void QCamera3MetadataChannel::putStreamBufs()
2314 {
2315     for (uint32_t i = 0; i < mMemory->getCnt(); i++) {
2316         if (mMemory->valid(i)) {
2317             metadata_buffer_t *meta =
2318                     static_cast<metadata_buffer_t *>(mMemory->getPtr(i));
2319             if (nullptr != meta->depth_data.depth_data) {
2320                 delete [] meta->depth_data.depth_data;
2321                 meta->depth_data.depth_data = nullptr;
2322             }
2323         } else {
2324             LOGE("Invalid meta buffer at index: %d", i);
2325         }
2326     }
2327 
2328     mMemory->deallocate();
2329     delete mMemory;
2330     mMemory = NULL;
2331 }
2332 /*************************************************************************************/
2333 // RAW Channel related functions
2334 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
2335                     uint32_t channel_handle,
2336                     mm_camera_ops_t *cam_ops,
2337                     channel_cb_routine cb_routine,
2338                     channel_cb_buffer_err cb_buffer_err,
2339                     cam_padding_info_t *paddingInfo,
2340                     void *userData,
2341                     camera3_stream_t *stream,
2342                     cam_feature_mask_t postprocess_mask,
2343                     QCamera3Channel *metadataChannel,
2344                     bool raw_16, uint32_t numBuffers) :
2345                         QCamera3RegularChannel(cam_handle, channel_handle, cam_ops,
2346                                 cb_routine, cb_buffer_err, paddingInfo, userData, stream,
2347                                 CAM_STREAM_TYPE_RAW, postprocess_mask,
2348                                 metadataChannel, numBuffers),
2349                         mIsRaw16(raw_16)
2350 {
2351     char prop[PROPERTY_VALUE_MAX];
2352     property_get("persist.camera.raw.debug.dump", prop, "0");
2353     mRawDump = atoi(prop);
2354     mMapStreamBuffers = (mRawDump || mIsRaw16);
2355 }
2356 
2357 QCamera3RawChannel::~QCamera3RawChannel()
2358 {
2359 }
2360 
2361 /*===========================================================================
2362  * FUNCTION   : initialize
2363  *
2364  * DESCRIPTION: Initialize and add camera channel & stream
2365  *
2366  * PARAMETERS :
2367  * @isType    : image stabilization type on the stream
2368  *
2369  * RETURN     : int32_t type of status
2370  *              NO_ERROR  -- success
2371  *              non-zero failure code
2372  *==========================================================================*/
2373 
2374 int32_t QCamera3RawChannel::initialize(cam_is_type_t isType)
2375 {
2376     return QCamera3RegularChannel::initialize(isType);
2377 }
2378 
2379 void QCamera3RawChannel::streamCbRoutine(
2380                         mm_camera_super_buf_t *super_frame,
2381                         QCamera3Stream * stream)
2382 {
2383     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_RAW_CH_STRM_CB);
2384     /* Move this back down once verified */
2385     if (mRawDump)
2386         dumpRawSnapshot(super_frame->bufs[0]);
2387 
2388     if (mIsRaw16) {
2389         cam_format_t streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_RAW,
2390                 mCamera3Stream->width, mCamera3Stream->height, m_bUBWCenable, mIsType);
2391         if (streamFormat == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
2392             convertMipiToRaw16(super_frame->bufs[0]);
2393         else
2394             convertLegacyToRaw16(super_frame->bufs[0]);
2395 
2396         //Make sure cache coherence because extra processing is done
2397         mMemory.cleanCache(super_frame->bufs[0]->buf_idx);
2398     }
2399 
2400     QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
2401     return;
2402 }
2403 
2404 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
2405 {
2406    QCamera3Stream *stream = getStreamByIndex(0);
2407    if (stream != NULL) {
2408        char buf[FILENAME_MAX];
2409        memset(buf, 0, sizeof(buf));
2410        cam_dimension_t dim;
2411        memset(&dim, 0, sizeof(dim));
2412        stream->getFrameDimension(dim);
2413 
2414        cam_frame_len_offset_t offset;
2415        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2416        stream->getFrameOffset(offset);
2417        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"r_%d_%dx%d.raw",
2418                 frame->frame_idx, offset.mp[0].stride, offset.mp[0].scanline);
2419 
2420        int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
2421        if (file_fd >= 0) {
2422           ssize_t written_len = write(file_fd, frame->buffer, frame->frame_len);
2423           LOGD("written number of bytes %zd", written_len);
2424           frame->cache_flags |= CPU_HAS_READ;
2425           close(file_fd);
2426        } else {
2427           LOGE("failed to open file to dump image");
2428        }
2429    } else {
2430        LOGE("Could not find stream");
2431    }
2432 
2433 }
2434 
2435 void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
2436 {
2437     // Convert image buffer from Opaque raw format to RAW16 format
2438     // 10bit Opaque raw is stored in the format of:
2439     // 0000 - p5 - p4 - p3 - p2 - p1 - p0
2440     // where p0 to p5 are 6 pixels (each 10 bits wide) and the most
2441     // significant 4 bits are 0s. Each 64-bit word contains 6 pixels.
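    //
    // Worked example (illustrative): within one 64-bit word as described above,
    // pixel p2 occupies bits [29:20] and can be recovered as
    //     uint16_t p2 = (uint16_t)((word >> 20) & 0x3FF);
    // which matches the (row_start[x/6] >> (10*(x%6))) & 0x3FF expression used
    // in the loop below.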
2442 
2443   QCamera3Stream *stream = getStreamByIndex(0);
2444   if (stream != NULL) {
2445       cam_dimension_t dim;
2446       memset(&dim, 0, sizeof(dim));
2447       stream->getFrameDimension(dim);
2448 
2449       cam_frame_len_offset_t offset;
2450       memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2451       stream->getFrameOffset(offset);
2452 
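      // Round the RAW16 stride up to a multiple of 16 pixels
      // (e.g., an image width of 4006 gives a stride of 4016).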
2453       uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
2454       uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
2455 
2456       // In-place format conversion.
2457       // Raw16 format always occupies more memory than opaque raw10.
2458       // Convert to Raw16 by iterating through all pixels from bottom-right
2459       // to top-left of the image.
2460       // Notes:
2461       // 1. Cross-platform raw16's stride is aligned to 16 pixels.
2462       // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
2463       for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
2464           uint32_t y = (uint32_t)ys;
2465           uint64_t* row_start = (uint64_t *)frame->buffer +
2466                   y * (uint32_t)offset.mp[0].stride_in_bytes / 8;
2467           for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
2468               uint32_t x = (uint32_t)xs;
2469               uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
2470               raw16_buffer[y*raw16_stride+x] = raw16_pixel;
2471           }
2472       }
2473   } else {
2474       LOGE("Could not find stream");
2475   }
2476 
2477 }
2478 
2479 void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
2480 {
2481     // Convert image buffer from mipi10 raw format to RAW16 format
2482     // mipi10 opaque raw is stored in the format of:
2483     // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
2484     // 4 pixels occupy 5 bytes, no padding needed
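    //
    // Worked example (illustrative): within one 5-byte group, pixel P1 is
    // rebuilt from byte 1 (its 8 MSBs) and bits [3:2] of byte 4 (its 2 LSBs):
    //     uint16_t p1 = (uint16_t)(((uint16_t)bytes[1] << 2) |
    //             ((bytes[4] >> 2) & 0x3));
    // which matches the per-pixel expression used in the loop below.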
2485 
2486     QCamera3Stream *stream = getStreamByIndex(0);
2487     if (stream != NULL) {
2488         cam_dimension_t dim;
2489         memset(&dim, 0, sizeof(dim));
2490         stream->getFrameDimension(dim);
2491 
2492         cam_frame_len_offset_t offset;
2493         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2494         stream->getFrameOffset(offset);
2495 
2496         uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
2497         uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
2498         uint8_t first_quintuple[5];
2499         memcpy(first_quintuple, raw16_buffer, sizeof(first_quintuple));
2500 
2501         // In-place format conversion.
2502         // Raw16 format always occupies more memory than opaque raw10.
2503         // Convert to Raw16 by iterating through all pixels from bottom-right
2504         // to top-left of the image.
2505         // Notes:
2506         // 1. Cross-platform raw16's stride is aligned to 16 pixels.
2507         // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
2508         for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
2509             uint32_t y = (uint32_t)ys;
2510             uint8_t* row_start = (uint8_t *)frame->buffer +
2511                     y * (uint32_t)offset.mp[0].stride_in_bytes;
2512             for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
2513                 uint32_t x = (uint32_t)xs;
2514                 uint8_t upper_8bit = row_start[5*(x/4)+x%4];
2515                 uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> ((x%4) << 1)) & 0x3);
2516                 uint16_t raw16_pixel =
2517                         (uint16_t)(((uint16_t)upper_8bit)<<2 |
2518                         (uint16_t)lower_2bit);
2519                 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
2520             }
2521         }
2522 
2523         // Re-convert the first 2 pixels of the buffer because the loop above messes
2524         // them up by reading the first quintuple while modifying it.
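        // (Detail: when x == 2 is processed, writing raw16_buffer[2] overwrites
        // byte 4 of the original buffer, which still holds the 2 LSBs of
        // pixels 0 and 1, so those two pixels must be rebuilt from the copy.)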
2525         raw16_buffer[0] = ((uint16_t)first_quintuple[0]<<2) | (first_quintuple[4] & 0x3);
2526         raw16_buffer[1] = ((uint16_t)first_quintuple[1]<<2) | ((first_quintuple[4] >> 2) & 0x3);
2527 
2528     } else {
2529         LOGE("Could not find stream");
2530     }
2531 
2532 }
2533 
2534 /*===========================================================================
2535  * FUNCTION   : getReprocessType
2536  *
2537  * DESCRIPTION: get the type of reprocess output supported by this channel
2538  *
2539  * PARAMETERS : NONE
2540  *
2541  * RETURN     : reprocess_type_t : type of reprocess
2542  *==========================================================================*/
2543 reprocess_type_t QCamera3RawChannel::getReprocessType()
2544 {
2545     return REPROCESS_TYPE_RAW;
2546 }
2547 
2548 
2549 /*************************************************************************************/
2550 // RAW Dump Channel related functions
2551 
2552 /*===========================================================================
2553  * FUNCTION   : QCamera3RawDumpChannel
2554  *
2555  * DESCRIPTION: Constructor for RawDumpChannel
2556  *
2557  * PARAMETERS :
2558  *   @cam_handle    : Handle for Camera
2559  *   @cam_ops       : Function pointer table
2560  *   @rawDumpSize   : Dimensions for the Raw stream
2561  *   @paddinginfo   : Padding information for stream
2562  *   @userData      : Cookie for parent
2563  *   @pp mask       : PP feature mask for this stream
2564  *   @numBuffers    : number of max dequeued buffers
2565  *
2566  * RETURN           : NA
2567  *==========================================================================*/
2568 QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
2569                     uint32_t channel_handle,
2570                     mm_camera_ops_t *cam_ops,
2571                     cam_dimension_t rawDumpSize,
2572                     cam_padding_info_t *paddingInfo,
2573                     void *userData,
2574                     cam_feature_mask_t postprocess_mask, uint32_t numBuffers) :
2575                         QCamera3Channel(cam_handle, channel_handle, cam_ops, NULL,
2576                                 NULL, paddingInfo, postprocess_mask,
2577                                 userData, numBuffers),
2578                         mDim(rawDumpSize),
2579                         mMemory(NULL)
2580 {
2581     char prop[PROPERTY_VALUE_MAX];
2582     property_get("persist.camera.raw.dump", prop, "0");
2583     mRawDump = atoi(prop);
2584 }
2585 
2586 /*===========================================================================
2587  * FUNCTION   : ~QCamera3RawDumpChannel
2588  *
2589  * DESCRIPTION: Destructor for RawDumpChannel
2590  *
2591  * PARAMETERS :
2592  *
2593  * RETURN           : NA
2594  *==========================================================================*/
2595 
2596 QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
2597 {
2598     destroy();
2599 }
2600 
2601 /*===========================================================================
2602  * FUNCTION   : dumpRawSnapshot
2603  *
2604  * DESCRIPTION: Helper function to dump Raw frames
2605  *
2606  * PARAMETERS :
2607  *  @frame      : stream buf frame to be dumped
2608  *
2609  *  RETURN      : NA
2610  *==========================================================================*/
2611 void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
2612 {
2613     QCamera3Stream *stream = getStreamByIndex(0);
2614     if (stream != NULL) {
2615         char buf[FILENAME_MAX];
2616         struct timeval tv;
2617         struct tm timeinfo_data;
2618         struct tm *timeinfo;
2619 
2620         cam_dimension_t dim;
2621         memset(&dim, 0, sizeof(dim));
2622         stream->getFrameDimension(dim);
2623 
2624         cam_frame_len_offset_t offset;
2625         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2626         stream->getFrameOffset(offset);
2627 
2628         gettimeofday(&tv, NULL);
2629         timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data);
2630 
2631         if (NULL != timeinfo) {
2632             memset(buf, 0, sizeof(buf));
2633             snprintf(buf, sizeof(buf),
2634                     QCAMERA_DUMP_FRM_LOCATION
2635                     "%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
2636                     timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
2637                     timeinfo->tm_mday, timeinfo->tm_hour,
2638                     timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
2639                     frame->frame_idx, dim.width, dim.height);
2640 
2641             int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
2642             if (file_fd >= 0) {
2643                 ssize_t written_len =
2644                         write(file_fd, frame->buffer, offset.frame_len);
2645                 LOGD("written number of bytes %zd", written_len);
2646                 frame->cache_flags |= CPU_HAS_READ;
2647                 close(file_fd);
2648             } else {
2649                 LOGE("failed to open file to dump image");
2650             }
2651         } else {
2652             LOGE("localtime_r() error");
2653         }
2654     } else {
2655         LOGE("Could not find stream");
2656     }
2657 
2658 }
2659 
2660 /*===========================================================================
2661  * FUNCTION   : streamCbRoutine
2662  *
2663  * DESCRIPTION: Callback routine invoked for each frame generated for
2664  *              Rawdump channel
2665  *
2666  * PARAMETERS :
2667  *   @super_frame  : stream buf frame generated
2668  *   @stream       : Underlying Stream object cookie
2669  *
2670  * RETURN          : NA
2671  *==========================================================================*/
2672 void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2673                                                 __unused QCamera3Stream *stream)
2674 {
2675     LOGD("E");
2676     if (super_frame == NULL || super_frame->num_bufs != 1) {
2677         LOGE("super_frame is not valid");
2678         return;
2679     }
2680 
2681     if (mRawDump)
2682         dumpRawSnapshot(super_frame->bufs[0]);
2683 
2684     bufDone(super_frame);
2685     free(super_frame);
2686 }
2687 
2688 /*===========================================================================
2689  * FUNCTION   : getStreamBufs
2690  *
2691  * DESCRIPTION: Callback function provided to interface to get buffers.
2692  *
2693  * PARAMETERS :
2694  *   @len       : Length of each buffer to be allocated
2695  *
2696  * RETURN     : NULL on buffer allocation failure
2697  *              QCamera3StreamMem object on success
2698  *==========================================================================*/
2699 QCamera3StreamMem* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
2700 {
2701     int rc;
2702     mMemory = new QCamera3StreamMem(mNumBuffers);
2703 
2704     if (!mMemory) {
2705         LOGE("unable to create heap memory");
2706         return NULL;
2707     }
2708     rc = mMemory->allocateAll((size_t)len);
2709     if (rc < 0) {
2710         LOGE("unable to allocate heap memory");
2711         delete mMemory;
2712         mMemory = NULL;
2713         return NULL;
2714     }
2715     return mMemory;
2716 }
2717 
2718 /*===========================================================================
2719  * FUNCTION   : putStreamBufs
2720  *
2721  * DESCRIPTION: Callback function provided to interface to return buffers.
2722  * DESCRIPTION: Callback function provided to the interface to return buffers.
2723  *              No handles are actually returned; the implicit assumption is that
2724  *              the interface will no longer use the buffers and the channel may deallocate them.
2725  *
2726  * PARAMETERS : NA
2727  *
2728  * RETURN     : NA
2729  *==========================================================================*/
2730 void QCamera3RawDumpChannel::putStreamBufs()
2731 {
2732     mMemory->deallocate();
2733     delete mMemory;
2734     mMemory = NULL;
2735 }
2736 
2737 /*===========================================================================
2738  * FUNCTION : request
2739  *
2740  * DESCRIPTION: Request function used as trigger
2741  *
2742  * PARAMETERS :
2743  * @buffer      : NULL, since this is an internal channel
2744  * @frameNumber : undefined, since this is an internal stream
2745  *
2746  * RETURN     : int32_t type of status
2747  *              NO_ERROR  -- success
2748  *              non-zero failure code
2749  *==========================================================================*/
2750 int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
2751                                                 uint32_t /*frameNumber*/,
2752                                                 int & /*indexUsed*/)
2753 {
2754     if (!m_bIsActive) {
2755         return QCamera3Channel::start();
2756     }
2757     else
2758         return 0;
2759 }
2760 
2761 /*===========================================================================
2762  * FUNCTION : initialize
2763  *
2764  * DESCRIPTION: Initializes channel params and creates underlying stream
2765  *
2766  * PARAMETERS :
2767  *    @isType : type of image stabilization required on this stream
2768  *
2769  * RETURN     : int32_t type of status
2770  *              NO_ERROR  -- success
2771  *              non-zero failure code
2772  *==========================================================================*/
2773 int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType)
2774 {
2775     int32_t rc;
2776 
2777     mIsType = isType;
2778     rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
2779         CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, ROTATE_0, (uint8_t)mNumBuffers,
2780         mPostProcMask, mIsType);
2781     if (rc < 0) {
2782         LOGE("addStream failed");
2783     }
2784     return rc;
2785 }
2786 
2787 /*************************************************************************************/
2788 // HDR+ RAW Source Channel related functions
2789 QCamera3HdrPlusRawSrcChannel::QCamera3HdrPlusRawSrcChannel(uint32_t cam_handle,
2790                     uint32_t channel_handle,
2791                     mm_camera_ops_t *cam_ops,
2792                     cam_dimension_t rawDumpSize,
2793                     cam_padding_info_t *paddingInfo,
2794                     void *userData,
2795                     cam_feature_mask_t postprocess_mask,
2796                     std::shared_ptr<HdrPlusClient> hdrPlusClient,
2797                     uint32_t hdrPlusStreamId,
2798                     uint32_t numBuffers) :
2799     QCamera3RawDumpChannel(cam_handle, channel_handle, cam_ops, rawDumpSize, paddingInfo, userData,
2800                     postprocess_mask, numBuffers),
2801     mHdrPlusClient(hdrPlusClient),
2802     mHdrPlusStreamId(hdrPlusStreamId)
2803 {
2804 
2805 }
2806 
2807 QCamera3HdrPlusRawSrcChannel::~QCamera3HdrPlusRawSrcChannel()
2808 {
2809 }
2810 
2811 void QCamera3HdrPlusRawSrcChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2812                             __unused QCamera3Stream *stream)
2813 {
2814     if (super_frame == NULL || super_frame->num_bufs != 1) {
2815         LOGE("super_frame is not valid");
2816         return;
2817     }
2818 
2819     // Send RAW buffer to HDR+ service
2820     sendRawToHdrPlusService(super_frame->bufs[0]);
2821 
2822     bufDone(super_frame);
2823     free(super_frame);
2824 }
2825 
2826 void QCamera3HdrPlusRawSrcChannel::sendRawToHdrPlusService(mm_camera_buf_def_t *frame)
2827 {
2828     QCamera3Stream *stream = getStreamByIndex(0);
2829     if (stream == nullptr) {
2830         LOGE("%s: Could not find stream.", __FUNCTION__);
2831         return;
2832     }
2833 
2834     cam_frame_len_offset_t offset = {};
2835     stream->getFrameOffset(offset);
2836 
2837     pbcamera::StreamBuffer buffer;
2838     buffer.streamId = mHdrPlusStreamId;
2839     buffer.data = frame->buffer;
2840     buffer.dataSize = offset.frame_len;
2841 
2842     // Use the frame timestamp as mock Easel timestamp.
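    // frame->ts is a struct timespec from the driver; fold its seconds and
    // nanoseconds fields into a single signed 64-bit nanosecond value.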
2843     int64_t mockEaselTimestampNs = (int64_t)frame->ts.tv_sec * 1000000000 + frame->ts.tv_nsec;
2844     mHdrPlusClient->notifyInputBuffer(buffer, mockEaselTimestampNs);
2845 }
2846 
2847 /*************************************************************************************/
2848 
2849 /* QCamera3YUVChannel methods */
2850 
2851 /*===========================================================================
2852  * FUNCTION   : QCamera3YUVChannel
2853  *
2854  * DESCRIPTION: constructor of QCamera3YUVChannel
2855  *
2856  * PARAMETERS :
2857  *   @cam_handle : camera handle
2858  *   @cam_ops    : ptr to camera ops table
2859  *   @cb_routine : callback routine to frame aggregator
2860  *   @paddingInfo : padding information for the stream
2861  *   @stream     : camera3_stream_t structure
2862  *   @stream_type: Channel stream type
2863  *   @postprocess_mask: the postprocess mask for streams of this channel
2864  *   @metadataChannel: handle to the metadataChannel
2865  * RETURN     : none
2866  *==========================================================================*/
2867 QCamera3YUVChannel::QCamera3YUVChannel(uint32_t cam_handle,
2868         uint32_t channel_handle,
2869         mm_camera_ops_t *cam_ops,
2870         channel_cb_routine cb_routine,
2871         channel_cb_buffer_err cb_buf_err,
2872         cam_padding_info_t *paddingInfo,
2873         void *userData,
2874         camera3_stream_t *stream,
2875         cam_stream_type_t stream_type,
2876         cam_feature_mask_t postprocess_mask,
2877         QCamera3Channel *metadataChannel) :
2878             QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
2879                     cb_routine, cb_buf_err, paddingInfo, userData, stream, stream_type,
2880                     postprocess_mask, metadataChannel)
2881 {
2882 
2883     mBypass = (postprocess_mask == CAM_QCOM_FEATURE_NONE);
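    // Bypass mode: with no fixed postprocess features the stream output can be
    // returned to the framework directly, and offline postprocessing (CPP) is
    // engaged per frame only when needsFramePostprocessing() requires it.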
2884     mFrameLen = 0;
2885     mEdgeMode.edge_mode = CAM_EDGE_MODE_OFF;
2886     mEdgeMode.sharpness = 0;
2887     mNoiseRedMode = CAM_NOISE_REDUCTION_MODE_OFF;
2888     memset(&mCropRegion, 0, sizeof(mCropRegion));
2889 }
2890 
2891 /*===========================================================================
2892  * FUNCTION   : ~QCamera3YUVChannel
2893  *
2894  * DESCRIPTION: destructor of QCamera3YUVChannel
2895  *
2896  * PARAMETERS : none
2897  *
2898  *
2899  * RETURN     : none
2900  *==========================================================================*/
2901 QCamera3YUVChannel::~QCamera3YUVChannel()
2902 {
2903    // Heap buffers allocated in mMemory are freed automatically
2904    // by its destructor
2905 }
2906 
2907 /*===========================================================================
2908  * FUNCTION   : initialize
2909  *
2910  * DESCRIPTION: Initialize and add camera channel & stream
2911  *
2912  * PARAMETERS :
2913  * @isType    : the image stabilization type
2914  *
2915  * RETURN     : int32_t type of status
2916  *              NO_ERROR  -- success
2917  *              non-zero failure code
2918  *==========================================================================*/
2919 int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType)
2920 {
2921     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_INIT);
2922     int32_t rc = NO_ERROR;
2923     cam_dimension_t streamDim;
2924 
2925     if (NULL == mCamera3Stream) {
2926         LOGE("Camera stream uninitialized");
2927         return NO_INIT;
2928     }
2929 
2930     if (1 <= m_numStreams) {
2931         // Only one stream per channel supported in v3 Hal
2932         return NO_ERROR;
2933     }
2934 
2935     mIsType  = isType;
2936     rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
2937             mStreamFormat);
2938     if (rc != NO_ERROR) {
2939         return -EINVAL;
2940     }
2941 
2942     streamDim.width = mCamera3Stream->width;
2943     streamDim.height = mCamera3Stream->height;
2944 
2945     rc = QCamera3Channel::addStream(mStreamType,
2946             mStreamFormat,
2947             streamDim,
2948             ROTATE_0,
2949             mNumBufs,
2950             mPostProcMask,
2951             mIsType);
2952     if (rc < 0) {
2953         LOGE("addStream failed");
2954         return rc;
2955     }
2956 
2957     cam_stream_buf_plane_info_t buf_planes;
2958     cam_padding_info_t paddingInfo = mPaddingInfo;
2959 
2960     memset(&buf_planes, 0, sizeof(buf_planes));
2961     //to ensure a big enough buffer size set the height and width
2962     //padding to max(height padding, width padding)
2963     paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
2964     paddingInfo.height_padding = paddingInfo.width_padding;
2965 
2966     rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
2967             &buf_planes);
2968     if (rc < 0) {
2969         LOGE("mm_stream_calc_offset_snapshot failed");
2970         return rc;
2971     }
2972 
2973     mFrameLen = buf_planes.plane_info.frame_len;
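    // mFrameLen is used later in request() to size the internal heap buffers
    // allocated for frames that are routed through offline postprocessing.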
2974 
2975     if (NO_ERROR != rc) {
2976         LOGE("Initialize failed, rc = %d", rc);
2977         return rc;
2978     }
2979 
2980     /* initialize offline meta memory for input reprocess */
2981     rc = QCamera3ProcessingChannel::initialize(isType);
2982     if (NO_ERROR != rc) {
2983         LOGE("Processing Channel initialize failed, rc = %d",
2984                  rc);
2985     }
2986 
2987     return rc;
2988 }
2989 
2990 /*===========================================================================
2991  * FUNCTION   : request
2992  *
2993  * DESCRIPTION: entry function for a request on a YUV stream. This function
2994  *              has the logic to service a request based on its type
2995  *
2996  * PARAMETERS :
2997  * @buffer          : pointer to the output buffer
2998  * @frameNumber     : frame number of the request
2999  * @pInputBuffer    : pointer to input buffer if an input request
3000  * @metadata        : parameters associated with the request
3001  * @internalRequest : boolean to indicate if this is a purely internal request
3002  *                    needing internal buffer allocation
3003  * @meteringOnly    : boolean indicating a metering-only internal frame that is
3004  *                    not consumed by the postprocessor
3005  *
3006  * RETURN     : 0 on a success start of capture
3007  *              -EINVAL on invalid input
3008  *              -ENODEV on serious error
3009  *==========================================================================*/
3010 int32_t QCamera3YUVChannel::request(buffer_handle_t *buffer,
3011         uint32_t frameNumber,
3012         camera3_stream_buffer_t* pInputBuffer,
3013         metadata_buffer_t* metadata, bool &needMetadata,
3014         int &indexUsed,
3015         __unused bool internalRequest = false,
3016         __unused bool meteringOnly = false)
3017 {
3018     int32_t rc = NO_ERROR;
3019     Mutex::Autolock lock(mOfflinePpLock);
3020 
3021     LOGD("pInputBuffer is %p frame number %d", pInputBuffer, frameNumber);
3022     if (NULL == buffer || NULL == metadata) {
3023         LOGE("Invalid buffer/metadata in channel request");
3024         return BAD_VALUE;
3025     }
3026 
3027     PpInfo ppInfo;
3028     memset(&ppInfo, 0, sizeof(ppInfo));
3029     ppInfo.frameNumber = frameNumber;
3030     ppInfo.offlinePpFlag = false;
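    // In bypass mode every non-reprocess request is tracked in mOfflinePpInfoList
    // so that results can be returned in request order even when some frames
    // detour through offline postprocessing.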
3031     if (mBypass && !pInputBuffer ) {
3032         ppInfo.offlinePpFlag = needsFramePostprocessing(metadata);
3033         ppInfo.output = buffer;
3034         mOfflinePpInfoList.push_back(ppInfo);
3035     }
3036 
3037     LOGD("offlinePpFlag is %d", ppInfo.offlinePpFlag);
3038     needMetadata = ppInfo.offlinePpFlag;
3039     if (!ppInfo.offlinePpFlag) {
3040         // regular request
3041         return QCamera3ProcessingChannel::request(buffer, frameNumber,
3042                 pInputBuffer, metadata, indexUsed);
3043     } else {
3044 
3045         //we need to send this frame through the CPP
3046         //Allocate heap memory, then buf done on the buffer
3047         uint32_t bufIdx;
3048         if (mFreeHeapBufferList.empty()) {
3049             rc = mMemory.allocateOne(mFrameLen);
3050             if (rc < 0) {
3051                 LOGE("Failed allocating heap buffer. Fatal");
3052                 return BAD_VALUE;
3053             } else {
3054                 bufIdx = (uint32_t)rc;
3055             }
3056         } else {
3057             bufIdx = *(mFreeHeapBufferList.begin());
3058             mFreeHeapBufferList.erase(mFreeHeapBufferList.begin());
3059         }
3060 
3061         /* Configure and start postproc if necessary */
3062         reprocess_config_t reproc_cfg;
3063         cam_dimension_t dim;
3064         memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
3065         memset(&dim, 0, sizeof(dim));
3066         mStreams[0]->getFrameDimension(dim);
3067         setReprocConfig(reproc_cfg, NULL, metadata, mStreamFormat, dim);
3068 
3069         // Start postprocessor without input buffer
3070         startPostProc(reproc_cfg);
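        // The filled stream buffer itself is handed to the postprocessor from
        // streamCbRoutine(); here we only queue an internal heap buffer to the
        // stream so the capture has somewhere to land.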
3071 
3072         LOGD("using heap buffer %d for frame number %d", bufIdx, frameNumber);
3073 
3074         mMemory.markFrameNumber(bufIdx, frameNumber);
3075         indexUsed = bufIdx;
3076         if (m_bIsActive) {
3077             mStreams[0]->bufDone(bufIdx);
3078         }
3079 
3080     }
3081     return rc;
3082 }
3083 
3084 /*===========================================================================
3085  * FUNCTION   : postprocFail
3086  *
3087  * DESCRIPTION: notify clients about failing post-process requests.
3088  *
3089  * PARAMETERS :
3090  * @ppBuffer  : pointer to the pp buffer.
3091  *
3092  * RETURN     : 0 on success
3093  *              -EINVAL on invalid input
3094  *==========================================================================*/
3095 int32_t QCamera3YUVChannel::postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer) {
3096     if (ppBuffer == nullptr) {
3097         return BAD_VALUE;
3098     }
3099 
3100     {
3101         List<PpInfo>::iterator ppInfo;
3102 
3103         Mutex::Autolock lock(mOfflinePpLock);
3104         for (ppInfo = mOfflinePpInfoList.begin();
3105                 ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3106             if (ppInfo->frameNumber == ppBuffer->frameNumber) {
3107                 break;
3108             }
3109         }
3110 
3111         if (ppInfo == mOfflinePpInfoList.end()) {
3112             LOGE("Offline reprocess info for frame number: %d not found!", ppBuffer->frameNumber);
3113             return BAD_VALUE;
3114         }
3115 
3116         LOGE("Failed YUV post-process on frame number: %d removing from offline queue!",
3117                 ppBuffer->frameNumber);
3118         mOfflinePpInfoList.erase(ppInfo);
3119     }
3120 
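    // Return the internal heap buffer reserved for this frame to the free pool;
    // markFrameNumber(index, -1) detaches it from the failed frame number.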
3121     int32_t bufferIndex = mMemory.getHeapBufferIndex(ppBuffer->frameNumber);
3122     if (bufferIndex < 0) {
3123         LOGE("Fatal %d: no buffer index for frame number %d", bufferIndex, ppBuffer->frameNumber);
3124         return BAD_VALUE;
3125     } else {
3126         mMemory.markFrameNumber(bufferIndex, -1);
3127         mFreeHeapBufferList.push_back(bufferIndex);
3128     }
3129 
3130     return QCamera3ProcessingChannel::postprocFail(ppBuffer);
3131 }
3132 
3133 /*===========================================================================
3134  * FUNCTION   : streamCbRoutine
3135  *
3136  * DESCRIPTION: callback routine for a filled YUV stream buffer; handles
3137  *              bypass-mode bookkeeping and offline postprocessing hand-off
3138  * PARAMETERS :
3139  * @super_frame : the super frame with filled buffer
3140  * @stream      : stream on which the buffer was requested and filled
3141  *
3142  * RETURN     : none
3143  *==========================================================================*/
3144 void QCamera3YUVChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
3145         QCamera3Stream *stream)
3146 {
3147     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_STRM_CB);
3148     uint8_t frameIndex;
3149     int32_t resultFrameNumber;
3150     bool droppedInputPPBuffer = false;
3151 
3152     if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
3153         LOGE("Error with the stream callback");
3154         return;
3155     }
3156 
3157     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
3158     if(frameIndex >= mNumBufs) {
3159          LOGE("Error, Invalid index for buffer");
3160          stream->bufDone(frameIndex);
3161          return;
3162     }
3163 
3164     if (mBypass) {
3165         {
3166             List<PpInfo>::iterator ppInfo;
3167 
3168             Mutex::Autolock lock(mOfflinePpLock);
3169             resultFrameNumber = mMemory.getFrameNumber(frameIndex);
3170             for (ppInfo = mOfflinePpInfoList.begin();
3171                     ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3172                 if (ppInfo->frameNumber == (uint32_t)resultFrameNumber) {
3173                     break;
3174                 }
3175             }
3176             LOGD("frame index %d, frame number %d", frameIndex,
3177                     resultFrameNumber);
3178             //check the reprocessing required flag against the frame number
3179             if (ppInfo == mOfflinePpInfoList.end()) {
3180                 LOGE("Error, request for frame number is a reprocess.");
3181                 stream->bufDone(frameIndex);
3182                 return;
3183             }
3184 
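            // Two cases: a frame flagged for offline postprocessing is handed to
            // the postprocessor (or dropped on a buffer error), while a regular
            // frame is cached if an older offline-PP frame is still pending, so
            // callbacks stay in request order.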
3185             if (ppInfo->offlinePpFlag) {
3186                 if (ppInfo != mOfflinePpInfoList.begin() &&
3187                         IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
3188                     droppedInputPPBuffer = true;
3189                     mOfflinePpInfoList.erase(ppInfo);
3190                 } else {
3191                     mm_camera_super_buf_t *frame =
3192                         (mm_camera_super_buf_t *)malloc(sizeof(
3193                                     mm_camera_super_buf_t));
3194                     if (frame == NULL) {
3195                         LOGE("Error allocating memory to save received_frame structure.");
3196                         if(stream) {
3197                             stream->bufDone(frameIndex);
3198                         }
3199                         return;
3200                     }
3201 
3202                     *frame = *super_frame;
3203                     m_postprocessor.processData(frame, ppInfo->output,
3204                             resultFrameNumber);
3205                     free(super_frame);
3206                     return;
3207                 }
3208             } else {
3209                 if (ppInfo != mOfflinePpInfoList.begin()) {
3210                     // There is pending reprocess buffer, cache current buffer
3211                     if (ppInfo->callback_buffer != NULL) {
3212                         LOGE("Fatal: cached callback_buffer is already present");
3213                     }
3214                     ppInfo->callback_buffer = super_frame;
3215                     return;
3216                 } else {
3217                     mOfflinePpInfoList.erase(ppInfo);
3218                 }
3219             }
3220         }
3221 
3222         if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
3223             mChannelCbBufErr(this, resultFrameNumber,
3224                             CAMERA3_BUFFER_STATUS_ERROR, mUserData);
3225             if (droppedInputPPBuffer) {
3226                 camera3_stream_buffer_t result = {};
3227                 result.buffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
3228                 int32_t bufferIndex =
3229                     mMemory.getHeapBufferIndex(resultFrameNumber);
3230                 if (bufferIndex < 0) {
3231                     LOGE("Fatal %d: no buffer index for frame number %d",
3232                             bufferIndex, resultFrameNumber);
3233                 } else {
3234                     mMemory.markFrameNumber(bufferIndex, -1);
3235                     mFreeHeapBufferList.push_back(bufferIndex);
3236                 }
3237 
3238                 LOGE("Input frame number: %d dropped!", resultFrameNumber);
3239                 result.stream = mCamera3Stream;
3240                 result.status = CAMERA3_BUFFER_STATUS_ERROR;
3241                 result.acquire_fence = -1;
3242                 result.release_fence = -1;
3243                 if (mChannelCB) {
3244                     mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
3245                 }
3246                 free(super_frame);
3247 
3248                 return;
3249             }
3250         }
3251     }
3252 
3253     QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
3254 
3255     /* return any pending buffers that were received out of order earlier */
3256     while((super_frame = getNextPendingCbBuffer())) {
3257         QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
3258     }
3259 
3260     return;
3261 }
3262 
3263 /*===========================================================================
3264  * FUNCTION   : getNextPendingCbBuffer
3265  *
3266  * DESCRIPTION: Returns the callback_buffer from the first entry of
3267  *              mOfflinePpInfoList
3268  *
3269  * PARAMETERS : none
3270  *
3271  * RETURN     : callback_buffer
3272  *==========================================================================*/
3273 mm_camera_super_buf_t* QCamera3YUVChannel::getNextPendingCbBuffer() {
3274     mm_camera_super_buf_t* super_frame = NULL;
3275     if (mOfflinePpInfoList.size()) {
3276         if ((super_frame = mOfflinePpInfoList.begin()->callback_buffer)) {
3277             mOfflinePpInfoList.erase(mOfflinePpInfoList.begin());
3278         }
3279     }
3280     return super_frame;
3281 }
3282 
3283 /*===========================================================================
3284  * FUNCTION   : reprocessCbRoutine
3285  *
3286  * DESCRIPTION: callback function for the reprocessed frame. This frame now
3287  *              should be returned to the framework. This same callback is
3288  *              used during input reprocessing or offline postprocessing
3289  *
3290  * PARAMETERS :
3291  * @resultBuffer      : buffer containing the reprocessed data
3292  * @resultFrameNumber : frame number on which the buffer was requested
3293  *
3294  * RETURN     : NONE
3295  *
3296  *==========================================================================*/
3297 void QCamera3YUVChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
3298         uint32_t resultFrameNumber)
3299 {
3300     LOGD("E: frame number %d", resultFrameNumber);
3301     Vector<mm_camera_super_buf_t *> pendingCbs;
3302 
3303     /* release the input buffer and input metadata buffer if used */
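    /* A negative heap buffer index means this frame did not use an internal
       heap buffer, i.e. it was an input reprocess request from the framework
       rather than an internal offline postprocessing pass */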
3304     if (0 > mMemory.getHeapBufferIndex(resultFrameNumber)) {
3305         /* mOfflineMemory and mOfflineMetaMemory used only for input reprocessing */
3306         int32_t rc = releaseOfflineMemory(resultFrameNumber);
3307         if (NO_ERROR != rc) {
3308             LOGE("Error releasing offline memory rc = %d", rc);
3309         }
3310         /* Since reprocessing is done, send the callback to release the input buffer */
3311         if (mChannelCB) {
3312             mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
3313         }
3314     }
3315 
3316     if (mBypass) {
3317         int32_t rc = handleOfflinePpCallback(resultFrameNumber, pendingCbs);
3318         if (rc != NO_ERROR) {
3319             return;
3320         }
3321     }
3322 
3323     issueChannelCb(resultBuffer, resultFrameNumber);
3324 
3325     // Call all pending callbacks to return buffers
3326     for (size_t i = 0; i < pendingCbs.size(); i++) {
3327         QCamera3ProcessingChannel::streamCbRoutine(
3328                 pendingCbs[i], mStreams[0]);
3329     }
3330 
3331 }
3332 
3333 /*===========================================================================
3334  * FUNCTION   : needsFramePostprocessing
3335  *
3336  * DESCRIPTION: check whether a frame needs offline postprocessing, based on
3337  *              the edge mode, noise reduction mode and crop region metadata
3338  * PARAMETERS :
3339  *  @meta : metadata buffer of the request
3340  * RETURN     :
3341  *  TRUE if frame needs to be postprocessed
3342  *  FALSE if frame does not need to be postprocessed
3343  *
3344  *==========================================================================*/
3345 bool QCamera3YUVChannel::needsFramePostprocessing(metadata_buffer_t *meta)
3346 {
3347     bool ppNeeded = false;
3348 
3349     //sharpness
3350     IF_META_AVAILABLE(cam_edge_application_t, edgeMode,
3351             CAM_INTF_META_EDGE_MODE, meta) {
3352         mEdgeMode = *edgeMode;
3353     }
3354 
3355     //wnr
3356     IF_META_AVAILABLE(uint32_t, noiseRedMode,
3357             CAM_INTF_META_NOISE_REDUCTION_MODE, meta) {
3358         mNoiseRedMode = *noiseRedMode;
3359     }
3360 
3361     //crop region
3362     IF_META_AVAILABLE(cam_crop_region_t, scalerCropRegion,
3363             CAM_INTF_META_SCALER_CROP_REGION, meta) {
3364         mCropRegion = *scalerCropRegion;
3365     }
3366 
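    // Postprocessing is needed when sharpening is requested, when (non-minimal)
    // noise reduction is requested, or when the crop region implies digital zoom
    // smaller than the output stream dimensions.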
3367     if ((CAM_EDGE_MODE_OFF != mEdgeMode.edge_mode) &&
3368             (CAM_EDGE_MODE_ZERO_SHUTTER_LAG != mEdgeMode.edge_mode)) {
3369         ppNeeded = true;
3370     }
3371     if ((CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG != mNoiseRedMode) &&
3372             (CAM_NOISE_REDUCTION_MODE_OFF != mNoiseRedMode) &&
3373             (CAM_NOISE_REDUCTION_MODE_MINIMAL != mNoiseRedMode)) {
3374         ppNeeded = true;
3375     }
3376     if ((mCropRegion.width < (int32_t)mCamera3Stream->width) ||
3377             (mCropRegion.height < (int32_t)mCamera3Stream->height)) {
3378         ppNeeded = true;
3379     }
3380 
3381     return ppNeeded;
3382 }
3383 
3384 /*===========================================================================
3385  * FUNCTION   : handleOfflinePpCallback
3386  *
3387  * DESCRIPTION: callback function for the reprocessed frame from offline
3388  *              postprocessing.
3389  *
3390  * PARAMETERS :
3391  * @resultFrameNumber : frame number on which the buffer was requested
3392  * @pendingCbs        : pending buffers to be returned first
3393  *
3394  * RETURN     : int32_t type of status
3395  *              NO_ERROR  -- success
3396  *              non-zero failure code
3397  *==========================================================================*/
3398 int32_t QCamera3YUVChannel::handleOfflinePpCallback(uint32_t resultFrameNumber,
3399             Vector<mm_camera_super_buf_t *>& pendingCbs)
3400 {
3401     Mutex::Autolock lock(mOfflinePpLock);
3402     List<PpInfo>::iterator ppInfo;
3403 
3404     for (ppInfo = mOfflinePpInfoList.begin();
3405             ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3406         if (ppInfo->frameNumber == resultFrameNumber) {
3407             break;
3408         }
3409     }
3410 
3411     if (ppInfo == mOfflinePpInfoList.end()) {
3412         LOGI("Request of frame number %d is reprocessing",
3413                  resultFrameNumber);
3414         return NO_ERROR;
3415     } else if (ppInfo != mOfflinePpInfoList.begin()) {
3416         LOGE("callback for frame number %d should be head of list",
3417                  resultFrameNumber);
3418         return BAD_VALUE;
3419     }
3420 
3421     if (ppInfo->offlinePpFlag) {
3422         // Need to get the input buffer frame index from the
3423         // mMemory object and add that to the free heap buffers list.
3424         int32_t bufferIndex =
3425                 mMemory.getHeapBufferIndex(resultFrameNumber);
3426         if (bufferIndex < 0) {
3427             LOGE("Fatal %d: no buffer index for frame number %d",
3428                      bufferIndex, resultFrameNumber);
3429             return BAD_VALUE;
3430         }
3431         mMemory.markFrameNumber(bufferIndex, -1);
3432         mFreeHeapBufferList.push_back(bufferIndex);
3433         //Move heap buffer into free pool and invalidate the frame number
3434         ppInfo = mOfflinePpInfoList.erase(ppInfo);
3435 
3436         /* return any pending buffers that were received out of order
3437             earlier */
3438         mm_camera_super_buf_t* super_frame;
3439         while((super_frame = getNextPendingCbBuffer())) {
3440             pendingCbs.push_back(super_frame);
3441         }
3442     } else {
3443         LOGE("Fatal: request of frame number %d doesn't need"
3444                 " offline postprocessing. However there is"
3445                 " reprocessing callback.",
3446                 resultFrameNumber);
3447         return BAD_VALUE;
3448     }
3449 
3450     return NO_ERROR;
3451 }
3452 
3453 /*===========================================================================
3454  * FUNCTION   : getReprocessType
3455  *
3456  * DESCRIPTION: get the type of reprocess output supported by this channel
3457  *
3458  * PARAMETERS : NONE
3459  *
3460  * RETURN     : reprocess_type_t : type of reprocess
3461  *==========================================================================*/
3462 reprocess_type_t QCamera3YUVChannel::getReprocessType()
3463 {
3464     return REPROCESS_TYPE_YUV;
3465 }
3466 
3467 /* QCamera3PicChannel methods */
3468 
3469 /*===========================================================================
3470  * FUNCTION   : jpegEvtHandle
3471  *
3472  * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events.
3473  *              Construct result payload and call mChannelCb to deliver buffer
3474  *              to framework.
3475  *
3476  * PARAMETERS :
3477  *   @status    : status of jpeg job
3478  *   @client_hdl: jpeg client handle
3479  *   @jobId     : jpeg job Id
3480  *   @p_output  : ptr to jpeg output result struct
3481  *   @userdata  : user data ptr
3482  *
3483  * RETURN     : none
3484  *==========================================================================*/
3485 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
3486                                               uint32_t /*client_hdl*/,
3487                                               uint32_t jobId,
3488                                               mm_jpeg_output_t *p_output,
3489                                               void *userdata)
3490 {
3491     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_JPEG_EVT_HANDLE);
3492     buffer_handle_t *resultBuffer = NULL;
3493     buffer_handle_t *jpegBufferHandle = NULL;
3494     int resultStatus = CAMERA3_BUFFER_STATUS_OK;
3495     camera3_stream_buffer_t result;
3496     camera3_jpeg_blob_t jpegHeader;
3497 
3498     QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
3499     if (obj) {
3500         //Construct payload for process_capture_result. Call mChannelCb
3501 
3502         qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
3503 
3504         if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
3505             LOGE("Error in jobId: (%d) with status: %d", jobId, status);
3506             resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
3507         }
3508 
3509         if (NULL != job) {
3510             uint32_t bufIdx = (uint32_t)job->jpeg_settings->out_buf_index;
3511             LOGD("jpeg out_buf_index: %d", bufIdx);
3512 
3513             //Construct jpeg transient header of type camera3_jpeg_blob_t
3514             //Append at the end of jpeg image of buf_filled_len size
3515 
3516             jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
3517             if (JPEG_JOB_STATUS_DONE == status) {
3518                 jpegHeader.jpeg_size = (uint32_t)p_output->buf_filled_len;
3519                 char* jpeg_buf = (char *)p_output->buf_vaddr;
3520                 cam_frame_len_offset_t offset;
3521                 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
3522                 mm_camera_buf_def_t *jpeg_dump_buffer = NULL;
3523                 cam_dimension_t dim;
3524                 dim.width = obj->mCamera3Stream->width;
3525                 dim.height = obj->mCamera3Stream->height;
3526                 jpeg_dump_buffer = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t));
3527                 if(!jpeg_dump_buffer) {
3528                     LOGE("Could not allocate jpeg dump buffer");
3529                 } else {
3530                     jpeg_dump_buffer->buffer = p_output->buf_vaddr;
3531                     jpeg_dump_buffer->frame_len = p_output->buf_filled_len;
3532                     jpeg_dump_buffer->frame_idx = obj->mMemory.getFrameNumber(bufIdx);
3533                     obj->dumpYUV(jpeg_dump_buffer, dim, offset, QCAMERA_DUMP_FRM_OUTPUT_JPEG);
3534                     free(jpeg_dump_buffer);
3535                 }
3536 
3537                 ssize_t maxJpegSize = -1;
3538 
3539                 // Gralloc buffer may have additional padding for 4K page size
3540                 // Follow size guidelines based on spec since framework relies
3541                 // on that to reach end of buffer and with it the header
3542 
3543                 //Handle same as resultBuffer, but renamed for readability
3544                 jpegBufferHandle =
3545                         (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
3546 
3547                 if (NULL != jpegBufferHandle) {
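                    // For BLOB/JPEG gralloc buffers the allocated size appears to
                    // be carried in the private handle's width field; clamp it to
                    // the size registered with mMemory to stay inside the buffer.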
3548                     maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
3549                     if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
3550                         maxJpegSize = obj->mMemory.getSize(bufIdx);
3551                     }
3552 
3553                     size_t jpeg_eof_offset =
3554                             (size_t)(maxJpegSize - (ssize_t)sizeof(jpegHeader));
3555                     char *jpeg_eof = &jpeg_buf[jpeg_eof_offset];
3556                     memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
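                    // The camera3_jpeg_blob_t header is written at
                    // (maxJpegSize - sizeof(jpegHeader)) so the framework can scan
                    // from the end of the buffer and recover the actual jpeg_size.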
3557                     obj->mMemory.cleanInvalidateCache(bufIdx);
3558                 } else {
3559                     LOGE("JPEG buffer not found and index: %d",
3560                             bufIdx);
3561                     resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
3562                 }
3563             }
3564 
3565             // Use the data below to issue the framework callback
3566             resultBuffer =
3567                     (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
3568             int32_t resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
3569             int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
3570             if (NO_ERROR != rc) {
3571                 LOGE("Error %d unregistering stream buffer %d",
3572                      rc, bufIdx);
3573             }
3574 
3575             result.stream = obj->mCamera3Stream;
3576             result.buffer = resultBuffer;
3577             result.status = resultStatus;
3578             result.acquire_fence = -1;
3579             result.release_fence = -1;
3580 
3581             // Release any snapshot buffers before calling
3582             // the user callback. The callback can potentially
3583             // unblock pending requests to snapshot stream.
3584             int32_t snapshotIdx = -1;
3585             mm_camera_super_buf_t* src_frame = NULL;
3586 
3587             if (job->src_reproc_frame)
3588                 src_frame = job->src_reproc_frame;
3589             else
3590                 src_frame = job->src_frame;
3591 
3592             if (src_frame) {
3593                 if (obj->mStreams[0]->getMyHandle() ==
3594                         src_frame->bufs[0]->stream_id) {
3595                     snapshotIdx = (int32_t)src_frame->bufs[0]->buf_idx;
3596                     if (0 <= snapshotIdx) {
3597                         Mutex::Autolock lock(obj->mFreeBuffersLock);
3598                         obj->mFreeBufferList.push_back((uint32_t)snapshotIdx);
3599                     }
3600                 }
3601             }
3602 
3603             LOGI("Issue Jpeg Callback frameNumber = %d status = %d",
3604                     resultFrameNumber, resultStatus);
3605             ATRACE_ASYNC_END("SNAPSHOT", resultFrameNumber);
3606             if (obj->mChannelCB) {
3607                 obj->mChannelCB(NULL,
3608                         &result,
3609                         (uint32_t)resultFrameNumber,
3610                         false,
3611                         obj->mUserData);
3612             }
3613 
3614             // release internal data for jpeg job
3615             if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
3616                 /* unregister offline input buffer */
3617                 int32_t inputBufIndex =
3618                         obj->mOfflineMemory.getGrallocBufferIndex((uint32_t)resultFrameNumber);
3619                 if (0 <= inputBufIndex) {
3620                     rc = obj->mOfflineMemory.unregisterBuffer(inputBufIndex);
3621                 } else {
3622                     LOGE("could not find the input buf index, frame number %d",
3623                              resultFrameNumber);
3624                 }
3625                 if (NO_ERROR != rc) {
3626                     LOGE("Error %d unregistering input buffer %d",
3627                              rc, bufIdx);
3628                 }
3629 
3630                 /* unregister offline meta buffer */
3631                 int32_t metaBufIndex =
3632                         obj->mOfflineMetaMemory.getHeapBufferIndex((uint32_t)resultFrameNumber);
3633                 if (0 <= metaBufIndex) {
3634                     Mutex::Autolock lock(obj->mFreeOfflineMetaBuffersLock);
3635                     obj->mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
3636                 } else {
3637                     LOGE("could not find the input meta buf index, frame number %d",
3638                              resultFrameNumber);
3639                 }
3640             }
3641             obj->m_postprocessor.releaseOfflineBuffers(false);
3642             obj->m_postprocessor.releaseJpegJobData(job);
3643             free(job);
3644         }
3645 
3646         return;
3648     } else {
3649         LOGE("Null userdata in jpeg callback");
3650     }
3651 }
3652 
3653 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
3654                     uint32_t channel_handle,
3655                     mm_camera_ops_t *cam_ops,
3656                     channel_cb_routine cb_routine,
3657                     channel_cb_buffer_err cb_buf_err,
3658                     cam_padding_info_t *paddingInfo,
3659                     void *userData,
3660                     camera3_stream_t *stream,
3661                     cam_feature_mask_t postprocess_mask,
3662                     __unused bool is4KVideo,
3663                     bool isInputStreamConfigured,
3664                     bool useY8,
3665                     QCamera3Channel *metadataChannel,
3666                     uint32_t numBuffers) :
3667                         QCamera3ProcessingChannel(cam_handle, channel_handle,
3668                                 cam_ops, cb_routine, cb_buf_err, paddingInfo, userData,
3669                                 stream, CAM_STREAM_TYPE_SNAPSHOT,
3670                                 postprocess_mask, metadataChannel, numBuffers),
3671                         mNumSnapshotBufs(0),
3672                         mInputBufferHint(isInputStreamConfigured),
3673                         mYuvMemory(NULL),
3674                         mFrameLen(0)
3675 {
3676     QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
3677     m_max_pic_dim = hal_obj->calcMaxJpegDim();
3678     mYuvWidth = stream->width;
3679     mYuvHeight = stream->height;
3680     mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
3681     // Use same pixelformat for 4K video case
3682     mStreamFormat = useY8 ? CAM_FORMAT_Y_ONLY : getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
3683             stream->width, stream->height, m_bUBWCenable, IS_TYPE_NONE);
3684     int32_t rc = m_postprocessor.initJpeg(jpegEvtHandle, &m_max_pic_dim, this);
3685     if (rc != 0) {
3686         LOGE("Init Postprocessor failed");
3687     }
3688 }
3689 
3690 /*===========================================================================
3691  * FUNCTION   : flush
3692  *
3693  * DESCRIPTION: flush pic channel, which will stop all processing within, including
3694  *              the reprocessing channel in postprocessor and YUV stream.
3695  *
3696  * PARAMETERS : none
3697  *
3698  * RETURN     : int32_t type of status
3699  *              NO_ERROR  -- success
3700  *              non-zero failure code
3701  *==========================================================================*/
3702 int32_t QCamera3PicChannel::flush()
3703 {
3704     int32_t rc = NO_ERROR;
3705     if(!m_bIsActive) {
3706         LOGE("Attempt to flush inactive channel");
3707         return NO_INIT;
3708     }
3709 
3710     rc = m_postprocessor.flush();
3711     if (rc != 0) {
3712         LOGE("Postprocessor flush failed, rc = %d", rc);
3713         return rc;
3714     }
3715 
3716     if (0 < mOfflineMetaMemory.getCnt()) {
3717         mOfflineMetaMemory.deallocate();
3718     }
3719     if (0 < mOfflineMemory.getCnt()) {
3720         mOfflineMemory.unregisterBuffers();
3721     }
3722     Mutex::Autolock lock(mFreeBuffersLock);
3723     mFreeBufferList.clear();
3724 
3725     for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
3726         mFreeBufferList.push_back(i);
3727     }
3728     return rc;
3729 }
3730 
3731 
3732 QCamera3PicChannel::~QCamera3PicChannel()
3733 {
3734     if (mYuvMemory != nullptr) {
3735         mYuvMemory->deallocate();
3736         delete mYuvMemory;
3737         mYuvMemory = nullptr;
3738     }
3739 }
3740 
3741 /*===========================================================================
3742  * FUNCTION : metadataBufDone
3743  *
3744  * DESCRIPTION: Buffer done method for a metadata buffer
3745  *
3746  * PARAMETERS :
3747  * @recvd_frame : received metadata frame
3748  *
3749  * RETURN     : int32_t type of status
3750  *              OK  -- success
3751  *              non-zero failure code
3752  *==========================================================================*/
3753 int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
3754 {
3755     // Check if this is an external metadata
3756     if (recvd_frame != nullptr && recvd_frame->num_bufs == 1) {
3757         Mutex::Autolock lock(mPendingExternalMetadataLock);
3758         auto iter = mPendingExternalMetadata.begin();
3759         while (iter != mPendingExternalMetadata.end()) {
3760             if (iter->get() == recvd_frame->bufs[0]->buffer) {
3761                 // Remove the metadata allocated externally.
3762                 mPendingExternalMetadata.erase(iter);
3763                 return OK;
3764             }
3765 
3766             iter++;
3767         }
3768     }
3769 
3770     // If this is not an external metadata, return the metadata.
3771     return QCamera3ProcessingChannel::metadataBufDone(recvd_frame);
3772 }
3773 
3774 int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
3775 {
3776     int32_t rc = NO_ERROR;
3777     cam_dimension_t streamDim;
3778     cam_stream_type_t streamType;
3779     cam_format_t streamFormat;
3780 
3781     if (NULL == mCamera3Stream) {
3782         LOGE("Camera stream uninitialized");
3783         return NO_INIT;
3784     }
3785 
3786     if (1 <= m_numStreams) {
3787         // Only one stream per channel supported in v3 Hal
3788         return NO_ERROR;
3789     }
3790 
3791     mIsType = isType;
3792     streamType = mStreamType;
3793     streamFormat = mStreamFormat;
3794     streamDim.width = (int32_t)mYuvWidth;
3795     streamDim.height = (int32_t)mYuvHeight;
3796 
3797     mNumSnapshotBufs = mCamera3Stream->max_buffers;
3798     rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
3799             ROTATE_0, (uint8_t)mCamera3Stream->max_buffers, mPostProcMask,
3800             mIsType);
3801 
3802     if (NO_ERROR != rc) {
3803         LOGE("Initialize failed, rc = %d", rc);
3804         return rc;
3805     }
3806 
3807     mYuvMemory = new QCamera3StreamMem(mCamera3Stream->max_buffers);
3808     if (!mYuvMemory) {
3809         LOGE("unable to create YUV buffers");
3810         return NO_MEMORY;
3811     }
3812     cam_stream_buf_plane_info_t buf_planes;
3813     cam_padding_info_t paddingInfo = mPaddingInfo;
3814 
3815     memset(&buf_planes, 0, sizeof(buf_planes));
3816     //to ensure a big enough buffer size set the height and width
3817     //padding to max(height padding, width padding)
3818     paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
3819     paddingInfo.height_padding = paddingInfo.width_padding;
3820 
3821     rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
3822             &buf_planes);
3823     if (rc < 0) {
3824         LOGE("mm_stream_calc_offset_snapshot failed");
3825         return rc;
3826     }
3827     mFrameLen = buf_planes.plane_info.frame_len;
3828 
3829     /* initialize offline meta memory for input reprocess */
3830     rc = QCamera3ProcessingChannel::initialize(isType);
3831     if (NO_ERROR != rc) {
3832         LOGE("Processing Channel initialize failed, rc = %d",
3833                  rc);
3834     }
3835 
3836     return rc;
3837 }
3838 
3839 /*===========================================================================
3840  * FUNCTION   : request
3841  *
3842  * DESCRIPTION: handle the request - either with an input buffer or a direct
3843  *              output request
3844  *
3845  * PARAMETERS :
3846  * @buffer       : pointer to the output buffer
3847  * @frameNumber  : frame number of the request
3848  * @pInputBuffer : pointer to input buffer if an input request
3849  * @metadata     : parameters associated with the request
3850  * @internalRequest  : boolean to indicate if this is a purely internal request
3851  *                    needing internal buffer allocation
3852  * @meteringOnly     : boolean indicating a metering-only internal frame that is
3853  *                    not consumed by the postprocessor
3854  *
3855  * RETURN     : 0 on a success start of capture
3856  *              -EINVAL on invalid input
3857  *              -ENODEV on serious error
3858  *==========================================================================*/
3859 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
3860         uint32_t frameNumber,
3861         camera3_stream_buffer_t *pInputBuffer,
3862         metadata_buffer_t *metadata, int &indexUsed,
3863         bool internalRequest, bool meteringOnly)
3864 {
3865     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_REQ);
3866     //FIX ME: Return buffer back in case of failures below.
3867 
3868     int32_t rc = NO_ERROR;
3869 
3870     reprocess_config_t reproc_cfg;
3871     cam_dimension_t dim;
3872     memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
3873     //make sure to set the correct input stream dim in case of YUV size override
3874     //and recalculate the plane info
3875     dim.width = (int32_t)mYuvWidth;
3876     dim.height = (int32_t)mYuvHeight;
3877 
3878     setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
3879 
3880     // Start postprocessor
3881     startPostProc(reproc_cfg);
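    // For a regular capture a YUV snapshot buffer is queued below and JPEG
    // encoding happens in the postprocessor; for an input reprocess request the
    // framework buffer is wrapped as fwk input data and fed to the postprocessor.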
3882 
3883     if (!internalRequest) {
3884         int index = mMemory.getMatchBufIndex((void*)buffer);
3885 
3886         if(index < 0) {
3887             rc = registerBuffer(buffer, mIsType);
3888             if (NO_ERROR != rc) {
3889                 LOGE("On-the-fly buffer registration failed %d",
3890                          rc);
3891                 return rc;
3892             }
3893 
3894             index = mMemory.getMatchBufIndex((void*)buffer);
3895             if (index < 0) {
3896                 LOGE("Could not find object among registered buffers");
3897                 return DEAD_OBJECT;
3898             }
3899         }
3900         LOGD("buffer index %d, frameNumber: %u", index, frameNumber);
3901 
3902         rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
3903 
3904         // Queue jpeg settings
3905         rc = queueJpegSetting((uint32_t)index, metadata);
3906 
3907     } else {
3908         LOGD("Internal request @ Picchannel");
3909     }
3910 
3911 
3912     if (pInputBuffer == NULL) {
3913         Mutex::Autolock lock(mFreeBuffersLock);
3914         uint32_t bufIdx;
3915         if (mFreeBufferList.empty()) {
3916             rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
3917             if (rc < 0) {
3918                 LOGE("Failed to allocate heap buffer. Fatal");
3919                 return rc;
3920             } else {
3921                 bufIdx = (uint32_t)rc;
3922             }
3923         } else {
3924             List<uint32_t>::iterator it = mFreeBufferList.begin();
3925             bufIdx = *it;
3926             mFreeBufferList.erase(it);
3927         }
3928         if (meteringOnly) {
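            // Metering-only frames are tagged with 0xFFFFFFFF (presumably
            // EMPTY_FRAMEWORK_FRAME_NUMBER) so the check in streamCbRoutine()
            // recycles them without issuing a framework callback.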
3929             mYuvMemory->markFrameNumber(bufIdx, 0xFFFFFFFF);
3930         } else {
3931             mYuvMemory->markFrameNumber(bufIdx, frameNumber);
3932         }
3933         if (m_bIsActive) {
3934             mStreams[0]->bufDone(bufIdx);
3935         }
3936         indexUsed = bufIdx;
3937     } else {
3938         qcamera_fwk_input_pp_data_t *src_frame = NULL;
3939         src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
3940                 sizeof(qcamera_fwk_input_pp_data_t));
3941         if (src_frame == NULL) {
3942             LOGE("No memory for src frame");
3943             return NO_MEMORY;
3944         }
3945         rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata,
3946                 NULL /*fwk output buffer*/, frameNumber);
3947         if (NO_ERROR != rc) {
3948             LOGE("Error %d while setting framework input PP data", rc);
3949             free(src_frame);
3950             return rc;
3951         }
3952         LOGH("Post-process started");
3953         m_postprocessor.processData(src_frame);
3954     }
3955     return rc;
3956 }
3957 
3958 
3959 
3960 /*===========================================================================
3961  * FUNCTION   : dataNotifyCB
3962  *
3963  * DESCRIPTION: Channel Level callback used for super buffer data notify.
3964  *              This function is registered with mm-camera-interface to handle
3965  *              data notify
3966  *
3967  * PARAMETERS :
3968  *   @recvd_frame   : stream frame received
3969  *   userdata       : user data ptr
3970  *
3971  * RETURN     : none
3972  *==========================================================================*/
3973 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
3974                                  void *userdata)
3975 {
3976     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_DATA_NOTIFY_CB);
3977     LOGD("E\n");
3978     QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
3979 
3980     if (channel == NULL) {
3981         LOGE("invalid channel pointer");
3982         return;
3983     }
3984 
3985     if(channel->m_numStreams != 1) {
3986         LOGE("Error: Bug: This callback assumes one stream per channel");
3987         return;
3988     }
3989 
3990 
3991     if(channel->mStreams[0] == NULL) {
3992         LOGE("Error: Invalid Stream object");
3993         return;
3994     }
3995 
3996     channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
3997 
3998     LOGD("X\n");
3999     return;
4000 }
4001 
4002 /*===========================================================================
4003  * FUNCTION   : streamCbRoutine
4004  *
4005  * DESCRIPTION: callback routine for a filled snapshot stream buffer; hands
4006  *              the YUV frame to the postprocessor for JPEG encoding
4007  * PARAMETERS :
4008  * @super_frame : the super frame with filled buffer
4009  * @stream      : stream on which the buffer was requested and filled
4010  *
4011  * RETURN     : none
4012  *==========================================================================*/
4013 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
4014                             QCamera3Stream *stream)
4015 {
4016     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CAPTURE_CH_CB);
4017     //TODO
4018     //Used only for getting YUV. Jpeg callback will be sent back from channel
4019     //directly to HWI. Refer to func jpegEvtHandle
4020 
4021     //Got the yuv callback. Calling yuv callback handler in PostProc
4022     uint8_t frameIndex;
4023     mm_camera_super_buf_t* frame = NULL;
4024     cam_dimension_t dim;
4025     cam_frame_len_offset_t offset;
4026 
4027     memset(&dim, 0, sizeof(dim));
4028     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
4029 
4030     if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
4031         LOGE("Error with the stream callback");
4032         return;
4033     }
4034 
4035     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4036     LOGD("recvd buf_idx: %u for further processing",
4037          (uint32_t)frameIndex);
4038     if(frameIndex >= mNumSnapshotBufs) {
4039          LOGE("Error, Invalid index for buffer");
4040          if(stream) {
4041              Mutex::Autolock lock(mFreeBuffersLock);
4042              mFreeBufferList.push_back(frameIndex);
4043              stream->bufDone(frameIndex);
4044          }
4045          return;
4046     }
4047 
4048     if ((uint32_t)mYuvMemory->getFrameNumber(frameIndex) == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4049         LOGD("Internal Request recycle frame");
4050         Mutex::Autolock lock(mFreeBuffersLock);
4051         mFreeBufferList.push_back(frameIndex);
4052         return;
4053     }
4054 
4055     frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
4056     if (frame == NULL) {
4057        LOGE("Error allocating memory to save received_frame structure.");
4058        if(stream) {
4059            Mutex::Autolock lock(mFreeBuffersLock);
4060            mFreeBufferList.push_back(frameIndex);
4061            stream->bufDone(frameIndex);
4062        }
4063        return;
4064     }
4065     *frame = *super_frame;
4066     stream->getFrameDimension(dim);
4067     stream->getFrameOffset(offset);
4068     dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
4069 
4070     if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
4071         mChannelCbBufErr(this, mYuvMemory->getFrameNumber(frameIndex),
4072                 CAMERA3_BUFFER_STATUS_ERROR, mUserData);
4073     }
4074 
4075     m_postprocessor.processData(frame);
4076     free(super_frame);
4077     return;
4078 }
4079 
4080 QCamera3StreamMem* QCamera3PicChannel::getStreamBufs(uint32_t /*len*/)
4081 {
4082     return mYuvMemory;
4083 }
4084 
4085 void QCamera3PicChannel::putStreamBufs()
4086 {
4087     QCamera3ProcessingChannel::putStreamBufs();
4088     Mutex::Autolock lock(mFreeBuffersLock);
4089     mFreeBufferList.clear();
4090 
4091     if (nullptr != mYuvMemory) {
4092         uint32_t count = mYuvMemory->getCnt();
4093         for (uint32_t i = 0; i < count; i++) {
4094             mFreeBufferList.push_back(i);
4095         }
4096     }
4097 }
4098 
4099 int32_t QCamera3PicChannel::queueJpegSetting(uint32_t index, metadata_buffer_t *metadata)
4100 {
4101     jpeg_settings_t *settings =
4102             (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
4103 
4104     if (!settings) {
4105         LOGE("out of memory allocating jpeg_settings");
4106         return -ENOMEM;
4107     }
4108 
4109     auto ret = initializeJpegSetting(index, metadata, settings);
4110     if (ret != NO_ERROR) {
             free(settings);
4111         return ret;
4112     }
4113 
4114     return m_postprocessor.processJpegSettingData(settings);
4115 }
4116 
int32_t QCamera3PicChannel::initializeJpegSetting(uint32_t index, metadata_buffer_t *metadata,
4118         jpeg_settings_t *settings) {
4119     if ((settings == nullptr) || (metadata == nullptr)) {
4120         return BAD_VALUE;
4121     }
4122     memset(settings, 0, sizeof(jpeg_settings_t));
4123 
4124     QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
4125     settings->out_buf_index = index;
4126 
4127     settings->jpeg_orientation = 0;
4128     IF_META_AVAILABLE(int32_t, orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4129         settings->jpeg_orientation = *orientation;
4130     }
4131 
4132     settings->jpeg_quality = 85;
4133     IF_META_AVAILABLE(uint32_t, quality1, CAM_INTF_META_JPEG_QUALITY, metadata) {
4134         settings->jpeg_quality = (uint8_t) *quality1;
4135     }
4136 
4137     IF_META_AVAILABLE(uint32_t, quality2, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4138         settings->jpeg_thumb_quality = (uint8_t) *quality2;
4139     }
4140 
4141     IF_META_AVAILABLE(cam_dimension_t, dimension, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4142         settings->thumbnail_size = *dimension;
4143     }
4144 
4145     settings->gps_timestamp_valid = 0;
4146     IF_META_AVAILABLE(int64_t, timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4147         settings->gps_timestamp = *timestamp;
4148         settings->gps_timestamp_valid = 1;
4149     }
4150 
4151     settings->gps_coordinates_valid = 0;
4152     IF_META_AVAILABLE(double, coordinates, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4153         memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
4154         settings->gps_coordinates_valid = 1;
4155     }
4156 
4157     IF_META_AVAILABLE(uint8_t, proc_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4158         memset(settings->gps_processing_method, 0,
4159                 sizeof(settings->gps_processing_method));
4160         strlcpy(settings->gps_processing_method, (const char *)proc_methods,
4161                 sizeof(settings->gps_processing_method));
4162     }
4163 
4164     settings->hdr_snapshot = 0;
4165     IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
4166         if (hdr_info->hdr_enable) {
4167             settings->hdr_snapshot = 1;
4168         }
4169     }
4170 
4171 
4172     // Image description
4173     const char *eepromVersion = hal_obj->getEepromVersionInfo();
4174     const uint32_t *ldafCalib = hal_obj->getLdafCalib();
4175     const char *easelFwVersion = hal_obj->getEaselFwVersion();
4176     if ((eepromVersion && strlen(eepromVersion)) ||
4177             ldafCalib || easelFwVersion) {
4178         uint32_t len = 0;
4179         settings->image_desc_valid = true;
4180         if (eepromVersion && strlen(eepromVersion)) {
4181             len = snprintf(settings->image_desc, sizeof(settings->image_desc),
4182                     "%s", eepromVersion);
4183         }
4184         if (ldafCalib) {
4185             len += snprintf(settings->image_desc + len,
4186                     sizeof(settings->image_desc) - len, "L:%u-%u",
4187                     ldafCalib[0], ldafCalib[1]);
4188         }
4189         if (easelFwVersion) {
4190             ALOGD("%s: Easel FW version %s", __FUNCTION__, easelFwVersion);
4191             if (len > 0 && len < sizeof(settings->image_desc)) {
4192                 settings->image_desc[len] = ',';
4193                 len++;
4194             }
4195             len += snprintf(settings->image_desc + len,
4196                             sizeof(settings->image_desc) - len, "E-ver:%s", easelFwVersion);
4197         }
4198     }
4199 
4200     return NO_ERROR;
4201 }
4202 
4203 
void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
4205 {
4206    mYuvWidth = width;
4207    mYuvHeight = height;
4208 }
4209 
4210 /*===========================================================================
4211  * FUNCTION   : getReprocessType
4212  *
4213  * DESCRIPTION: get the type of reprocess output supported by this channel
4214  *
4215  * PARAMETERS : NONE
4216  *
4217  * RETURN     : reprocess_type_t : type of reprocess
4218  *==========================================================================*/
reprocess_type_t QCamera3PicChannel::getReprocessType()
4220 {
4221     /* a picture channel could either use the postprocessor for reprocess+jpeg
4222        or only for reprocess */
4223     reprocess_type_t expectedReprocess;
4224     if (mPostProcMask == CAM_QCOM_FEATURE_NONE || mInputBufferHint) {
4225         expectedReprocess = REPROCESS_TYPE_JPEG;
4226     } else {
4227         expectedReprocess = REPROCESS_TYPE_NONE;
4228     }
4229     LOGH("expectedReprocess from Pic Channel is %d", expectedReprocess);
4230     return expectedReprocess;
4231 }
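
/* Illustrative sketch (not part of the HAL source): how the reprocess type
 * returned above typically steers buffer selection in the reprocess channel
 * later in this file (internal heap YUV memory for REPROCESS_TYPE_JPEG,
 * gralloc-backed framework memory otherwise). The helper is hypothetical. */
#if 0
static const char *sketchDescribeReprocessType(reprocess_type_t type)
{
    switch (type) {
    case REPROCESS_TYPE_JPEG:
        // Postprocessor output lands in an internal heap buffer and is then
        // JPEG encoded; see QCamera3ReprocessChannel::getStreamBufs().
        return "reprocess + jpeg encode (heap YUV buffers)";
    case REPROCESS_TYPE_NONE:
        // Postprocessor writes directly into the framework gralloc buffer.
        return "reprocess only (gralloc output buffers)";
    default:
        return "other reprocess type";
    }
}
#endif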
4232 
4233 
4234 /*===========================================================================
4235  * FUNCTION   : timeoutFrame
4236  *
 * DESCRIPTION: Method to indicate to the channel that a given frame has taken
 *              too long to be generated
 *
 * PARAMETERS :
 *   @frameNumber : frame number of the buffer that is timing out
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
4245  *==========================================================================*/
int32_t QCamera3PicChannel::timeoutFrame(uint32_t frameNumber)
4247 {
4248     int32_t bufIdx;
4249 
4250     bufIdx = mYuvMemory->getBufferIndex(frameNumber);
4251 
4252     if (bufIdx < 0) {
4253         LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
4254         return -1;
4255     }
4256 
4257     mStreams[0]->timeoutFrame(bufIdx);
4258 
4259     return NO_ERROR;
4260 }
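
/* Illustrative sketch (not part of the HAL source): a hypothetical caller of
 * timeoutFrame(). HWI would typically decide that a snapshot request is
 * overdue and ask the channel to time out the buffer tied to that frame
 * number; everything here except timeoutFrame() itself is an assumption. */
#if 0
static void sketchTimeoutOverdueSnapshot(QCamera3PicChannel *picChannel,
        uint32_t overdueFrameNumber)
{
    if (picChannel == NULL) {
        return;
    }
    // The channel maps the frame number to its YUV buffer index and forwards
    // the timeout to its stream (see timeoutFrame() above).
    if (picChannel->timeoutFrame(overdueFrameNumber) != NO_ERROR) {
        LOGE("Timing out frame %u failed", overdueFrameNumber);
    }
}
#endif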
4261 
int32_t QCamera3PicChannel::getYuvBufferForRequest(mm_camera_buf_def_t *frame,
4263         uint32_t frameNumber)
4264 {
4265     uint32_t bufIdx;
4266     status_t rc;
4267 
4268     Mutex::Autolock lock(mFreeBuffersLock);
4269 
4270     // Get an available YUV buffer.
4271     if (mFreeBufferList.empty()) {
4272         // Allocate a buffer if no one is available.
4273         rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
4274         if (rc < 0) {
4275             LOGE("Failed to allocate heap buffer. Fatal");
4276             return rc;
4277         } else {
4278             bufIdx = (uint32_t)rc;
4279         }
4280     } else {
4281         List<uint32_t>::iterator it = mFreeBufferList.begin();
4282         bufIdx = *it;
4283         mFreeBufferList.erase(it);
4284     }
4285 
4286     mYuvMemory->markFrameNumber(bufIdx, frameNumber);
4287 
4288     cam_frame_len_offset_t offset = {};
4289     mStreams[0]->getFrameOffset(offset);
4290 
4291     // Get a buffer from YUV memory.
4292     rc = mYuvMemory->getBufDef(offset, *frame, bufIdx, mMapStreamBuffers);
4293     if (rc != 0) {
4294         ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
4295         return rc;
4296     }
4297 
4298     // Set the frame's stream ID because it's not set in getBufDef.
4299     frame->stream_id = mStreams[0]->getMyHandle();
4300     return 0;
4301 }
4302 
int32_t QCamera3PicChannel::returnYuvBuffer(mm_camera_buf_def_t *frame)
4304 {
4305     Mutex::Autolock lock(mFreeBuffersLock);
4306     mFreeBufferList.push_back(frame->buf_idx);
4307     return 0;
4308 }
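
/* Illustrative sketch (not part of the HAL source): the intended lend/return
 * cycle of the internal YUV buffers handled above. A client borrows a buffer
 * for a frame number, lets an external pipeline fill it, and then either
 * returns it unused or hands it back with an output JPEG buffer for encoding.
 * Everything except the three channel methods is an assumption. */
#if 0
static int32_t sketchYuvRoundTrip(QCamera3PicChannel *picChannel,
        uint32_t frameNumber, buffer_handle_t *jpegOutput,
        std::shared_ptr<metadata_buffer_t> metadata,
        mm_camera_buf_def_t *metaFrame)
{
    mm_camera_buf_def_t yuvFrame;
    memset(&yuvFrame, 0, sizeof(yuvFrame));

    // Borrow (or allocate) an internal YUV buffer for this frame number.
    int32_t rc = picChannel->getYuvBufferForRequest(&yuvFrame, frameNumber);
    if (rc != 0) {
        return rc;
    }

    // ... an external pipeline fills yuvFrame here ...

    if (jpegOutput == NULL) {
        // Nothing to encode: put the buffer back on the free list.
        return picChannel->returnYuvBuffer(&yuvFrame);
    }

    // Encode path: the channel builds a jpeg job around the filled buffer.
    return picChannel->returnYuvBufferAndEncode(&yuvFrame, jpegOutput,
            frameNumber, metadata, metaFrame);
}
#endif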
4309 
int32_t QCamera3PicChannel::returnYuvBufferAndEncode(mm_camera_buf_def_t *frame,
4311         buffer_handle_t *outBuffer, uint32_t frameNumber,
4312         std::shared_ptr<metadata_buffer_t> metadata, mm_camera_buf_def_t *metaFrame)
4313 {
4314     int32_t rc = OK;
4315 
4316     // Picture stream must have been started before any request comes in.
4317     if (!m_bIsActive) {
4318         LOGE("Channel not started!!");
4319         return NO_INIT;
4320     }
4321 
4322     // Set up reprocess configuration
4323     reprocess_config_t reproc_cfg = {};
4324     cam_dimension_t dim;
4325     dim.width = (int32_t)mYuvWidth;
4326     dim.height = (int32_t)mYuvHeight;
4327     setReprocConfig(reproc_cfg, nullptr, metadata.get(), mStreamFormat, dim);
4328 
4329     // Get the index of the output jpeg buffer.
4330     int index = mMemory.getMatchBufIndex((void*)outBuffer);
4331     if(index < 0) {
4332         rc = registerBuffer(outBuffer, mIsType);
4333         if (OK != rc) {
4334             LOGE("On-the-fly buffer registration failed %d",
4335                      rc);
4336             return rc;
4337         }
4338 
4339         index = mMemory.getMatchBufIndex((void*)outBuffer);
4340         if (index < 0) {
4341             LOGE("Could not find object among registered buffers");
4342             return DEAD_OBJECT;
4343         }
4344     }
4345 
4346     rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
4347     if (rc != OK) {
4348         ALOGE("%s: Marking frame number (%u) for jpeg buffer (%d) failed: %s (%d)", __FUNCTION__,
4349                 frameNumber, index, strerror(-rc), rc);
4350         return rc;
4351     }
4352 
4353     // Start postprocessor
4354     startPostProc(reproc_cfg);
4355 
4356     qcamera_hal3_jpeg_data_t *jpeg_job =
4357             (qcamera_hal3_jpeg_data_t *) calloc(1, sizeof(qcamera_hal3_jpeg_data_t));
4358     if (jpeg_job == NULL) {
4359         LOGE("No memory for jpeg job");
4360         return NO_MEMORY;
4361     }
4362 
    jpeg_job->jpeg_settings = (jpeg_settings_t *) calloc(1, sizeof(jpeg_settings_t));
    if (jpeg_job->jpeg_settings == nullptr) {
        LOGE("out of memory allocating jpeg_settings");
        // Avoid leaking the jpeg job on this error path.
        free(jpeg_job);
        return NO_MEMORY;
    }

    auto ret = initializeJpegSetting(index, metadata.get(), jpeg_job->jpeg_settings);
    if (ret != NO_ERROR) {
        // Avoid leaking the jpeg job and its settings on failure.
        free(jpeg_job->jpeg_settings);
        free(jpeg_job);
        return ret;
    }
4373 
4374     // Allocate a buffer for the YUV input. It will be freed in QCamera3PostProc.
4375     jpeg_job->src_frame =
4376             (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
    if (jpeg_job->src_frame == nullptr) {
        LOGE("%s: No memory for src frame", __FUNCTION__);
        // Avoid leaking the jpeg job and its settings on this error path.
        free(jpeg_job->jpeg_settings);
        free(jpeg_job);
        return NO_MEMORY;
    }
4381     jpeg_job->src_frame->camera_handle = m_camHandle;
4382     jpeg_job->src_frame->ch_id = getMyHandle();
4383     jpeg_job->src_frame->num_bufs = 1;
4384     jpeg_job->src_frame->bufs[0] = frame;
4385 
4386     // Allocate a buffer for the metadata. It will be freed in QCamera3PostProc.
4387     jpeg_job->src_metadata =
4388             (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
    if (jpeg_job->src_metadata == nullptr) {
        LOGE("%s: No memory for metadata", __FUNCTION__);
        // Avoid leaking the jpeg job, settings, and src frame on this error path.
        free(jpeg_job->src_frame);
        free(jpeg_job->jpeg_settings);
        free(jpeg_job);
        return NO_MEMORY;
    }
4393     jpeg_job->src_metadata->camera_handle = m_camHandle;
4394     jpeg_job->src_metadata->ch_id = getMyHandle();
4395     jpeg_job->src_metadata->num_bufs = 1;
4396     jpeg_job->src_metadata->bufs[0] = metaFrame;
4397     jpeg_job->src_metadata->bufs[0]->buffer = metadata.get();
4398     jpeg_job->metadata = metadata.get();
4399 
4400     // Start processing the jpeg job
4401     jpeg_job->hdr_plus_processing = true;
4402     rc = m_postprocessor.processJpegJob(jpeg_job);
4403     if (rc != OK) {
4404         ALOGE("%s: Post processing jpeg (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
4405                 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4406         return rc;
4407     }
4408 
4409     // Queue the external metadata.
4410     {
4411         Mutex::Autolock lock(mPendingExternalMetadataLock);
4412         mPendingExternalMetadata.push_back(metadata);
4413     }
4414 
4415     return OK;
4416 }
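
/* Illustrative sketch (not part of the HAL source): on success the jpeg job
 * and the super buffers allocated above are released by QCamera3PostProc, so
 * only the error paths free them locally. A hypothetical helper for those
 * error paths could look like this (free(NULL) is a no-op). */
#if 0
static void sketchReleaseJpegJob(qcamera_hal3_jpeg_data_t *jpegJob)
{
    if (jpegJob == NULL) {
        return;
    }
    // Release members in reverse order of allocation, then the job itself.
    free(jpegJob->src_metadata);
    free(jpegJob->src_frame);
    free(jpegJob->jpeg_settings);
    free(jpegJob);
}
#endif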
4417 
4418 /*===========================================================================
4419  * FUNCTION   : QCamera3ReprocessChannel
4420  *
4421  * DESCRIPTION: constructor of QCamera3ReprocessChannel
4422  *
4423  * PARAMETERS :
4424  *   @cam_handle : camera handle
4425  *   @cam_ops    : ptr to camera ops table
 *   @pp_mask    : post-process feature mask
4427  *
4428  * RETURN     : none
4429  *==========================================================================*/
QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
4431                                                  uint32_t channel_handle,
4432                                                  mm_camera_ops_t *cam_ops,
4433                                                  channel_cb_routine cb_routine,
4434                                                  channel_cb_buffer_err cb_buf_err,
4435                                                  cam_padding_info_t *paddingInfo,
4436                                                  cam_feature_mask_t postprocess_mask,
4437                                                  void *userData, void *ch_hdl) :
4438     /* In case of framework reprocessing, pproc and jpeg operations could be
4439      * parallelized by allowing 1 extra buffer for reprocessing output:
4440      * ch_hdl->getNumBuffers() + 1 */
4441     QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine, cb_buf_err, paddingInfo,
4442                     postprocess_mask, userData,
4443                     ((QCamera3ProcessingChannel *)ch_hdl)->getNumBuffers()
4444                               + (MAX_REPROCESS_PIPELINE_STAGES - 1)),
4445     inputChHandle(ch_hdl),
4446     mOfflineBuffersIndex(-1),
4447     mFrameLen(0),
4448     mReprocessType(REPROCESS_TYPE_NONE),
4449     m_pSrcChannel(NULL),
4450     m_pMetaChannel(NULL),
4451     mMemory(NULL),
4452     mGrallocMemory(0),
4453     mReprocessPerfMode(false)
4454 {
4455     memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
4456     mOfflineBuffersIndex = mNumBuffers -1;
4457     mOfflineMetaIndex = (int32_t) (2*mNumBuffers -1);
4458 }
4459 
4460 
4461 /*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: initialize the reprocess channel by adding it to mm-camera
 *
 * PARAMETERS :
 *   @isType : image stabilization type for the channel
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
4472  *==========================================================================*/
int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
4474 {
4475     int32_t rc = NO_ERROR;
4476     mm_camera_channel_attr_t attr;
4477 
4478     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4479     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4480     attr.max_unmatched_frames = 1;
4481 
4482     m_handle = m_camOps->add_channel(m_camHandle,
4483                                       &attr,
4484                                       NULL,
4485                                       this);
4486     if (m_handle == 0) {
4487         LOGE("Add channel failed");
4488         return UNKNOWN_ERROR;
4489     }
4490 
4491     mIsType = isType;
4492     return rc;
4493 }
4494 
4495 /*===========================================================================
4496  * FUNCTION   : registerBuffer
4497  *
4498  * DESCRIPTION: register streaming buffer to the channel object
4499  *
4500  * PARAMETERS :
4501  *   @buffer     : buffer to be registered
4502  *   @isType     : the image stabilization type for the buffer
4503  *
4504  * RETURN     : int32_t type of status
4505  *              NO_ERROR  -- success
 *              non-zero failure code
4507  *==========================================================================*/
int32_t QCamera3ReprocessChannel::registerBuffer(buffer_handle_t *buffer,
4509         cam_is_type_t isType)
4510 {
4511     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_REG_BUF);
4512     int rc = 0;
4513     mIsType = isType;
4514     cam_stream_type_t streamType;
4515 
4516     if (buffer == NULL) {
4517         LOGE("Error: Cannot register a NULL buffer");
4518         return BAD_VALUE;
4519     }
4520 
4521     if ((uint32_t)mGrallocMemory.getCnt() > (mNumBuffers - 1)) {
4522         LOGE("Trying to register more buffers than initially requested");
4523         return BAD_VALUE;
4524     }
4525 
4526     if (0 == m_numStreams) {
4527         rc = initialize(mIsType);
4528         if (rc != NO_ERROR) {
4529             LOGE("Couldn't initialize camera stream %d",
4530                      rc);
4531             return rc;
4532         }
4533     }
4534 
4535     streamType = mStreams[0]->getMyType();
4536     rc = mGrallocMemory.registerBuffer(buffer, streamType);
4537     if (ALREADY_EXISTS == rc) {
4538         return NO_ERROR;
4539     } else if (NO_ERROR != rc) {
4540         LOGE("Buffer %p couldn't be registered %d", buffer, rc);
4541         return rc;
4542     }
4543 
4544     return rc;
4545 }
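
/* Illustrative sketch (not part of the HAL source): the on-the-fly gralloc
 * registration pattern built around registerBuffer() above (the same pattern
 * appears in returnYuvBufferAndEncode() and doReprocessOffline()): look the
 * buffer up, register it on a miss, then look it up again. The helper and its
 * parameters are hypothetical; a negative return value means failure. */
#if 0
static int32_t sketchResolveOutputBufferIndex(QCamera3ReprocessChannel *channel,
        QCamera3GrallocMemory &memory, buffer_handle_t *buffer,
        cam_is_type_t isType)
{
    // Fast path: the buffer was registered by an earlier request.
    int index = memory.getMatchBufIndex((void *)buffer);
    if (index >= 0) {
        return index;
    }

    // Slow path: register now, then resolve the index again.
    int32_t rc = channel->registerBuffer(buffer, isType);
    if (rc != NO_ERROR) {
        return rc;
    }
    index = memory.getMatchBufIndex((void *)buffer);
    return (index < 0) ? DEAD_OBJECT : index;
}
#endif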
4546 
4547 /*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: callback routine for the reprocess stream. For JPEG reprocess
 *              it hands the output to the postprocessor; otherwise it returns
 *              the framework output buffer to HWI.
 *
 * PARAMETERS :
 *   @super_frame : the super frame with filled buffer
 *   @stream      : stream on which the buffer was filled
 *
 * RETURN     : none
4558  *==========================================================================*/
void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
4560                                   QCamera3Stream *stream)
4561 {
4562     //Got the pproc data callback. Now send to jpeg encoding
4563     uint8_t frameIndex;
4564     uint32_t resultFrameNumber;
4565     ATRACE_CALL();
4566     mm_camera_super_buf_t* frame = NULL;
4567     QCamera3ProcessingChannel *obj = (QCamera3ProcessingChannel *)inputChHandle;
4568     cam_dimension_t dim;
4569     cam_frame_len_offset_t offset;
4570 
4571     memset(&dim, 0, sizeof(dim));
4572     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
4573     if(!super_frame) {
4574          LOGE("Invalid Super buffer");
4575          return;
4576     }
4577 
4578     if(super_frame->num_bufs != 1) {
4579          LOGE("Multiple streams are not supported");
4580          return;
4581     }
4582     if(super_frame->bufs[0] == NULL ) {
4583          LOGE("Error, Super buffer frame does not contain valid buffer");
4584          return;
4585     }
4586     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4587 
4588 
4589     if (mReprocessType == REPROCESS_TYPE_JPEG) {
4590         resultFrameNumber =  mMemory->getFrameNumber(frameIndex);
4591         frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
4592         if (frame == NULL) {
4593            LOGE("Error allocating memory to save received_frame structure.");
4594            if(stream) {
4595                stream->bufDone(frameIndex);
4596            }
4597            return;
4598         }
4599         LOGI("bufIndex: %u recvd from post proc",
4600                  (uint32_t)frameIndex);
4601         *frame = *super_frame;
4602 
4603         stream->getFrameDimension(dim);
4604         stream->getFrameOffset(offset);
4605         dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_JPEG);
4606         // Release offline buffers.
4607         int32_t rc = obj->releaseOfflineMemory(resultFrameNumber);
4608         if (NO_ERROR != rc) {
4609             LOGE("Error releasing offline memory %d", rc);
4610         }
4611         /* Since reprocessing is done, send the callback to release the input buffer */
4612         if (mChannelCB) {
4613             mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
4614         }
4615         obj->m_postprocessor.processPPData(frame);
4616     } else {
4617         buffer_handle_t *resultBuffer;
4618         frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4619         resultBuffer = (buffer_handle_t *)mGrallocMemory.getBufferHandle(frameIndex);
4620         resultFrameNumber = mGrallocMemory.getFrameNumber(frameIndex);
4621         int32_t rc = stream->bufRelease(frameIndex);
4622         if (NO_ERROR != rc) {
4623             LOGE("Error %d releasing stream buffer %d",
4624                      rc, frameIndex);
4625         }
4626         rc = mGrallocMemory.unregisterBuffer(frameIndex);
4627         if (NO_ERROR != rc) {
4628             LOGE("Error %d unregistering stream buffer %d",
4629                      rc, frameIndex);
4630         }
4631         obj->reprocessCbRoutine(resultBuffer, resultFrameNumber);
4632 
4633         obj->m_postprocessor.releaseOfflineBuffers(false);
4634         qcamera_hal3_pp_data_t *pp_job = obj->m_postprocessor.dequeuePPJob(resultFrameNumber);
4635         if (pp_job != NULL) {
4636             obj->m_postprocessor.releasePPJobData(pp_job);
4637         }
4638         free(pp_job);
4639         resetToCamPerfNormal(resultFrameNumber);
4640     }
4641     free(super_frame);
4642     return;
4643 }
4644 
4645 /*===========================================================================
4646  * FUNCTION   : resetToCamPerfNormal
4647  *
4648  * DESCRIPTION: Set the perf mode to normal if all the priority frames
4649  *              have been reprocessed
4650  *
4651  * PARAMETERS :
4652  *      @frameNumber: Frame number of the reprocess completed frame
4653  *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
4655  *==========================================================================*/
int32_t QCamera3ReprocessChannel::resetToCamPerfNormal(uint32_t frameNumber)
4657 {
4658     int32_t rc = NO_ERROR;
4659     bool resetToPerfNormal = false;
4660     {
4661         Mutex::Autolock lock(mPriorityFramesLock);
4662         /* remove the priority frame number from the list */
4663         for (size_t i = 0; i < mPriorityFrames.size(); i++) {
4664             if (mPriorityFrames[i] == frameNumber) {
4665                 mPriorityFrames.removeAt(i);
4666             }
4667         }
4668         /* reset the perf mode if pending priority frame list is empty */
4669         if (mReprocessPerfMode && mPriorityFrames.empty()) {
4670             resetToPerfNormal = true;
4671         }
4672     }
4673     if (resetToPerfNormal) {
4674         QCamera3Stream *pStream = mStreams[0];
4675         cam_stream_parm_buffer_t param;
4676         memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
4677 
4678         param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
4679         param.perf_mode = CAM_PERF_NORMAL;
4680         rc = pStream->setParameter(param);
4681         {
4682             Mutex::Autolock lock(mPriorityFramesLock);
4683             mReprocessPerfMode = false;
4684         }
4685     }
4686     return rc;
4687 }
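
/* Illustrative sketch (not part of the HAL source): the priority-frame perf
 * lifecycle shared by doReprocessOffline() and resetToCamPerfNormal(). The
 * first pending priority frame raises the stream to CAM_PERF_HIGH_PERFORMANCE
 * and draining the pending list drops it back to CAM_PERF_NORMAL. The helper
 * below only illustrates the request-ops-mode parameter and is hypothetical. */
#if 0
static int32_t sketchSetStreamPerfMode(QCamera3Stream *stream,
        bool highPerformance)
{
    cam_stream_parm_buffer_t param;
    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));

    // Same stream parameter type used above for raising and restoring perf.
    param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
    param.perf_mode = highPerformance ? CAM_PERF_HIGH_PERFORMANCE
                                      : CAM_PERF_NORMAL;
    return stream->setParameter(param);
}
#endif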
4688 
4689 /*===========================================================================
4690  * FUNCTION   : getStreamBufs
4691  *
 * DESCRIPTION: allocate (or fetch) the stream memory of the reprocess channel
 *
 * PARAMETERS :
 *   @len : length of each stream buffer
4695  *
4696  * RETURN     : QCamera3StreamMem *
4697  *==========================================================================*/
QCamera3StreamMem* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
4699 {
4700     if (mReprocessType == REPROCESS_TYPE_JPEG) {
4701         mMemory = new QCamera3StreamMem(mNumBuffers);
4702         if (!mMemory) {
4703             LOGE("unable to create reproc memory");
4704             return NULL;
4705         }
4706         mFrameLen = len;
4707         return mMemory;
4708     }
4709     return &mGrallocMemory;
4710 }
4711 
4712 /*===========================================================================
4713  * FUNCTION   : putStreamBufs
4714  *
4715  * DESCRIPTION: release the reprocess channel buffers
4716  *
4717  * PARAMETERS : none
4718  *
 * RETURN     : none
4720  *==========================================================================*/
void QCamera3ReprocessChannel::putStreamBufs()
4722 {
4723    if (mReprocessType == REPROCESS_TYPE_JPEG) {
4724        mMemory->deallocate();
4725        delete mMemory;
4726        mMemory = NULL;
4727        mFreeBufferList.clear();
4728    } else {
4729        mGrallocMemory.unregisterBuffers();
4730    }
4731 }
4732 
4733 /*===========================================================================
4734  * FUNCTION   : ~QCamera3ReprocessChannel
4735  *
4736  * DESCRIPTION: destructor of QCamera3ReprocessChannel
4737  *
4738  * PARAMETERS : none
4739  *
4740  * RETURN     : none
4741  *==========================================================================*/
QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
4743 {
4744     destroy();
4745 
4746     if (m_handle) {
4747         m_camOps->delete_channel(m_camHandle, m_handle);
4748         LOGD("deleting channel %d", m_handle);
4749         m_handle = 0;
4750     }
4751 }
4752 
4753 /*===========================================================================
4754  * FUNCTION   : start
4755  *
4756  * DESCRIPTION: start reprocess channel.
4757  *
4758  * PARAMETERS :
4759  *
4760  * RETURN     : int32_t type of status
4761  *              NO_ERROR  -- success
 *              non-zero failure code
4763  *==========================================================================*/
int32_t QCamera3ReprocessChannel::start()
4765 {
4766     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_START);
4767     int32_t rc = NO_ERROR;
4768 
4769     rc = QCamera3Channel::start();
4770 
4771     if (rc == NO_ERROR) {
4772        rc = m_camOps->start_channel(m_camHandle, m_handle, /*start_sensor_streaming*/true);
4773 
4774        // Check failure
4775        if (rc != NO_ERROR) {
4776            LOGE("start_channel failed %d", rc);
4777            QCamera3Channel::stop();
4778        }
4779     }
4780     return rc;
4781 }
4782 
4783 /*===========================================================================
4784  * FUNCTION   : stop
4785  *
4786  * DESCRIPTION: stop reprocess channel.
4787  *
4788  * PARAMETERS : none
4789  *
4790  * RETURN     : int32_t type of status
4791  *              NO_ERROR  -- success
 *              non-zero failure code
4793  *==========================================================================*/
int32_t QCamera3ReprocessChannel::stop()
4795 {
4796     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_STOP);
4797     int32_t rc = NO_ERROR;
4798 
4799     rc = QCamera3Channel::stop();
4800     rc |= m_camOps->stop_channel(m_camHandle, m_handle, /*stop_channel_immediately*/false);
4801     // Unmapping the buffers
4802     unmapOfflineBuffers(true);
4803     return rc;
4804 }
4805 
4806 /*===========================================================================
4807  * FUNCTION   : getStreamBySrcHandle
4808  *
4809  * DESCRIPTION: find reprocess stream by its source stream handle
4810  *
4811  * PARAMETERS :
4812  *   @srcHandle : source stream handle
4813  *
4814  * RETURN     : ptr to reprocess stream if found. NULL if not found
4815  *==========================================================================*/
QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
4817 {
4818     QCamera3Stream *pStream = NULL;
4819 
4820     for (uint32_t i = 0; i < m_numStreams; i++) {
4821         if (mSrcStreamHandles[i] == srcHandle) {
4822             pStream = mStreams[i];
4823             break;
4824         }
4825     }
4826     return pStream;
4827 }
4828 
4829 /*===========================================================================
4830  * FUNCTION   : getSrcStreamBySrcHandle
4831  *
4832  * DESCRIPTION: find source stream by source stream handle
4833  *
4834  * PARAMETERS :
4835  *   @srcHandle : source stream handle
4836  *
 * RETURN     : ptr to the source stream if found. NULL if not found
4838  *==========================================================================*/
QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
4840 {
4841     QCamera3Stream *pStream = NULL;
4842 
4843     if (NULL == m_pSrcChannel) {
4844         return NULL;
4845     }
4846 
4847     for (uint32_t i = 0; i < m_numStreams; i++) {
4848         if (mSrcStreamHandles[i] == srcHandle) {
4849             pStream = m_pSrcChannel->getStreamByIndex(i);
4850             break;
4851         }
4852     }
4853     return pStream;
4854 }
4855 
4856 /*===========================================================================
4857  * FUNCTION   : unmapOfflineBuffers
4858  *
4859  * DESCRIPTION: Unmaps offline buffers
4860  *
4861  * PARAMETERS : none
4862  *
4863  * RETURN     : int32_t type of status
4864  *              NO_ERROR  -- success
 *              non-zero failure code
4866  *==========================================================================*/
int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
4868 {
4869     int rc = NO_ERROR;
4870     Mutex::Autolock l(mOfflineBuffersLock);
4871     if (!mOfflineBuffers.empty()) {
4872         QCamera3Stream *stream = NULL;
4873         List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
4874         for (; it != mOfflineBuffers.end(); it++) {
4875            stream = (*it).stream;
4876            if (NULL != stream) {
4877                rc = stream->unmapBuf((*it).type,
4878                                      (*it).index,
4879                                         -1);
4880                if (NO_ERROR != rc) {
4881                    LOGE("Error during offline buffer unmap %d",
4882                           rc);
4883                }
4884                LOGD("Unmapped buffer with index %d", (*it).index);
4885            }
4886            if (!all) {
4887                mOfflineBuffers.erase(it);
4888                break;
4889            }
4890         }
4891         if (all) {
4892            mOfflineBuffers.clear();
4893         }
4894     }
4895 
4896     if (!mOfflineMetaBuffers.empty()) {
4897         QCamera3Stream *stream = NULL;
4898         List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
4899         for (; it != mOfflineMetaBuffers.end(); it++) {
4900            stream = (*it).stream;
4901            if (NULL != stream) {
4902                rc = stream->unmapBuf((*it).type,
4903                                      (*it).index,
4904                                         -1);
4905                if (NO_ERROR != rc) {
4906                    LOGE("Error during offline buffer unmap %d",
4907                           rc);
4908                }
4909                LOGD("Unmapped meta buffer with index %d", (*it).index);
4910            }
4911            if (!all) {
4912                mOfflineMetaBuffers.erase(it);
4913                break;
4914            }
4915         }
4916         if (all) {
4917            mOfflineMetaBuffers.clear();
4918         }
4919     }
4920     return rc;
4921 }
4922 
4923 /*===========================================================================
4924  * FUNCTION   : bufDone
4925  *
4926  * DESCRIPTION: Return reprocess stream buffer to free buffer list.
4927  *              Note that this function doesn't queue buffer back to kernel.
4928  *              It's up to doReprocessOffline to do that instead.
4929  * PARAMETERS :
4930  *   @recvd_frame  : stream buf frame to be returned
4931  *
4932  * RETURN     : int32_t type of status
4933  *              NO_ERROR  -- success
 *              non-zero failure code
4935  *==========================================================================*/
int32_t QCamera3ReprocessChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
4937 {
4938     int rc = NO_ERROR;
4939     if (recvd_frame && recvd_frame->num_bufs == 1) {
4940         Mutex::Autolock lock(mFreeBuffersLock);
4941         uint32_t buf_idx = recvd_frame->bufs[0]->buf_idx;
4942         mFreeBufferList.push_back(buf_idx);
4943 
4944     } else {
4945         LOGE("Fatal. Not supposed to be here");
4946         rc = BAD_VALUE;
4947     }
4948 
4949     return rc;
4950 }
4951 
4952 /*===========================================================================
4953  * FUNCTION   : overrideMetadata
4954  *
4955  * DESCRIPTION: Override metadata entry such as rotation, crop, and CDS info.
4956  *
4957  * PARAMETERS :
 *   @pp_buffer     : pp buffer holding the input frame from the source stream
 *   @meta_buffer   : metadata buffer
 *   @jpeg_settings : jpeg settings for this frame (may be NULL)
 *   @fwk_frame     : resulting framework frame for offline reprocess
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
4966  *==========================================================================*/
int32_t QCamera3ReprocessChannel::overrideMetadata(qcamera_hal3_pp_buffer_t *pp_buffer,
4968         mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
4969         qcamera_fwk_input_pp_data_t &fwk_frame)
4970 {
4971     int32_t rc = NO_ERROR;
4972     QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
4973     if ((NULL == meta_buffer) || (NULL == pp_buffer) || (NULL == pp_buffer->input) ||
4974             (NULL == hal_obj)) {
4975         return BAD_VALUE;
4976     }
4977 
4978     metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
4979     mm_camera_super_buf_t *frame = pp_buffer->input;
4980     if (NULL == meta) {
4981         return BAD_VALUE;
4982     }
4983 
4984     for (uint32_t i = 0; i < frame->num_bufs; i++) {
4985         QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
4986         QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
4987 
4988         if (pStream != NULL && pSrcStream != NULL) {
4989             if (jpeg_settings) {
4990                 // Find rotation info for reprocess stream
4991                 cam_rotation_info_t rotation_info;
4992                 memset(&rotation_info, 0, sizeof(rotation_info));
4993                 if (jpeg_settings->jpeg_orientation == 0) {
4994                    rotation_info.rotation = ROTATE_0;
4995                 } else if (jpeg_settings->jpeg_orientation == 90) {
4996                    rotation_info.rotation = ROTATE_90;
4997                 } else if (jpeg_settings->jpeg_orientation == 180) {
4998                    rotation_info.rotation = ROTATE_180;
4999                 } else if (jpeg_settings->jpeg_orientation == 270) {
5000                    rotation_info.rotation = ROTATE_270;
5001                 }
5002 
5003                 rotation_info.device_rotation = ROTATE_0;
5004                 rotation_info.streamId = mStreams[0]->getMyServerID();
5005                 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
5006             }
5007 
5008             // Find and insert crop info for reprocess stream
5009             IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
5010                 if (MAX_NUM_STREAMS > crop_data->num_of_streams) {
5011                     for (int j = 0; j < crop_data->num_of_streams; j++) {
5012                         if (crop_data->crop_info[j].stream_id ==
5013                                 pSrcStream->getMyServerID()) {
5014 
5015                             // Store crop/roi information for offline reprocess
5016                             // in the reprocess stream slot
5017                             crop_data->crop_info[crop_data->num_of_streams].crop =
5018                                     crop_data->crop_info[j].crop;
5019                             crop_data->crop_info[crop_data->num_of_streams].roi_map =
5020                                     crop_data->crop_info[j].roi_map;
5021                             crop_data->crop_info[crop_data->num_of_streams].stream_id =
5022                                     mStreams[0]->getMyServerID();
5023                             crop_data->num_of_streams++;
5024 
5025                             LOGD("Reprocess stream server id: %d",
5026                                      mStreams[0]->getMyServerID());
5027                             LOGD("Found offline reprocess crop %dx%d %dx%d",
5028                                     crop_data->crop_info[j].crop.left,
5029                                     crop_data->crop_info[j].crop.top,
5030                                     crop_data->crop_info[j].crop.width,
5031                                     crop_data->crop_info[j].crop.height);
5032                             LOGD("Found offline reprocess roimap %dx%d %dx%d",
5033                                     crop_data->crop_info[j].roi_map.left,
5034                                     crop_data->crop_info[j].roi_map.top,
5035                                     crop_data->crop_info[j].roi_map.width,
5036                                     crop_data->crop_info[j].roi_map.height);
5037 
5038                             break;
5039                         }
5040                     }
5041                 } else {
5042                     LOGE("No space to add reprocess stream crop/roi information");
5043                 }
5044             }
5045 
5046             IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
5047                 uint8_t cnt = cdsInfo->num_of_streams;
5048                 if (cnt <= MAX_NUM_STREAMS) {
5049                     cam_stream_cds_info_t repro_cds_info;
5050                     memset(&repro_cds_info, 0, sizeof(repro_cds_info));
5051                     repro_cds_info.stream_id = mStreams[0]->getMyServerID();
5052                     for (size_t i = 0; i < cnt; i++) {
5053                         if (cdsInfo->cds_info[i].stream_id ==
5054                                 pSrcStream->getMyServerID()) {
5055                             repro_cds_info.cds_enable =
5056                                     cdsInfo->cds_info[i].cds_enable;
5057                             break;
5058                         }
5059                     }
5060                     cdsInfo->num_of_streams = 1;
5061                     cdsInfo->cds_info[0] = repro_cds_info;
5062                 } else {
5063                     LOGE("No space to add reprocess stream cds information");
5064                 }
5065             }
5066 
5067             fwk_frame.input_buffer = *frame->bufs[i];
5068             fwk_frame.metadata_buffer = *meta_buffer;
5069             fwk_frame.output_buffer = pp_buffer->output;
5070             break;
5071         } else {
5072             LOGE("Source/Re-process streams are invalid");
5073             rc |= BAD_VALUE;
5074         }
5075     }
5076 
5077     return rc;
5078 }
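
/* Illustrative sketch (not part of the HAL source): the JPEG-orientation to
 * rotation mapping applied above when filling cam_rotation_info_t. The helper
 * is hypothetical; unexpected values fall back to ROTATE_0, matching the
 * if/else chain in overrideMetadata(). */
#if 0
static void sketchFillRotationInfo(cam_rotation_info_t *info,
        int32_t jpegOrientation)
{
    info->device_rotation = ROTATE_0;
    if (jpegOrientation == 90) {
        info->rotation = ROTATE_90;
    } else if (jpegOrientation == 180) {
        info->rotation = ROTATE_180;
    } else if (jpegOrientation == 270) {
        info->rotation = ROTATE_270;
    } else {
        // 0 degrees and any unexpected value both mean no rotation.
        info->rotation = ROTATE_0;
    }
}
#endif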
5079 
5080 /*===========================================================================
 * FUNCTION   : overrideFwkMetadata
 *
 * DESCRIPTION: Override framework metadata such as rotation, crop, and CDS data.
 *
 * PARAMETERS :
 *   @frame : input frame for reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::overrideFwkMetadata(
5093         qcamera_fwk_input_pp_data_t *frame)
5094 {
5095     if (NULL == frame) {
5096         LOGE("Incorrect input frame");
5097         return BAD_VALUE;
5098     }
5099 
5100     if (NULL == frame->metadata_buffer.buffer) {
5101         LOGE("No metadata available");
5102         return BAD_VALUE;
5103     }
5104     metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
5105 
5106     // Not doing rotation at all for YUV to YUV reprocess
5107     if (mReprocessType != REPROCESS_TYPE_JPEG) {
5108         LOGD("Override rotation to 0 for channel reprocess type %d",
5109                 mReprocessType);
5110         cam_rotation_info_t rotation_info;
5111         memset(&rotation_info, 0, sizeof(rotation_info));
5112         rotation_info.rotation = ROTATE_0;
5113         rotation_info.streamId = mStreams[0]->getMyServerID();
5114         ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
5115     }
5116 
5117     // Find and insert crop info for reprocess stream
5118     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
5119         if (1 == crop_data->num_of_streams) {
5120             // Store crop/roi information for offline reprocess
5121             // in the reprocess stream slot
5122             crop_data->crop_info[crop_data->num_of_streams].crop =
5123                     crop_data->crop_info[0].crop;
5124             crop_data->crop_info[crop_data->num_of_streams].roi_map =
5125                     crop_data->crop_info[0].roi_map;
5126             crop_data->crop_info[crop_data->num_of_streams].stream_id =
5127                     mStreams[0]->getMyServerID();
5128             crop_data->num_of_streams++;
5129 
5130             LOGD("Reprocess stream server id: %d",
5131                      mStreams[0]->getMyServerID());
5132             LOGD("Found offline reprocess crop %dx%d %dx%d",
5133                     crop_data->crop_info[0].crop.left,
5134                     crop_data->crop_info[0].crop.top,
5135                     crop_data->crop_info[0].crop.width,
5136                     crop_data->crop_info[0].crop.height);
5137             LOGD("Found offline reprocess roi map %dx%d %dx%d",
5138                     crop_data->crop_info[0].roi_map.left,
5139                     crop_data->crop_info[0].roi_map.top,
5140                     crop_data->crop_info[0].roi_map.width,
5141                     crop_data->crop_info[0].roi_map.height);
5142         } else {
5143             LOGE("Incorrect number of offline crop data entries %d",
5144                     crop_data->num_of_streams);
5145             return BAD_VALUE;
5146         }
5147     } else {
5148         LOGW("Crop data not present");
5149     }
5150 
5151     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
5152         if (1 == cdsInfo->num_of_streams) {
5153             cdsInfo->cds_info[0].stream_id = mStreams[0]->getMyServerID();
5154         } else {
5155             LOGE("Incorrect number of offline cds info entries %d",
5156                      cdsInfo->num_of_streams);
5157             return BAD_VALUE;
5158         }
5159     }
5160 
5161     return NO_ERROR;
5162 }
5163 
5164 /*===========================================================================
5165  * FUNCTION   : doReprocessOffline
5166  *
5167  * DESCRIPTION: request to do a reprocess on the frame
5168  *
5169  * PARAMETERS :
5170  *   @frame     : input frame for reprocessing
 *   @isPriorityFrame : hint that this frame should be treated with real-time
 *              priority even though it goes through the offline mechanism
5173  *
5174  * RETURN     : int32_t type of status
5175  *              NO_ERROR  -- success
 *              non-zero failure code
5177  *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocessOffline(
5179         qcamera_fwk_input_pp_data_t *frame, bool isPriorityFrame)
5180 {
5181     int32_t rc = 0;
5182     int index;
5183     OfflineBuffer mappedBuffer;
5184     ATRACE_CALL();
5185 
5186     if (m_numStreams < 1) {
5187         LOGE("No reprocess stream is created");
5188         return -1;
5189     }
5190 
5191     if (NULL == frame) {
5192         LOGE("Incorrect input frame");
5193         return BAD_VALUE;
5194     }
5195 
5196     if (NULL == frame->metadata_buffer.buffer) {
5197         LOGE("No metadata available");
5198         return BAD_VALUE;
5199     }
5200 
5201     if (0 > frame->input_buffer.fd) {
5202         LOGE("No input buffer available");
5203         return BAD_VALUE;
5204     }
5205 
5206     if ((0 == m_numStreams) || (NULL == mStreams[0])) {
5207         LOGE("Reprocess stream not initialized!");
5208         return NO_INIT;
5209     }
5210 
5211     QCamera3Stream *pStream = mStreams[0];
5212 
5213     //qbuf the output buffer if it was allocated by the framework
5214     if (mReprocessType != REPROCESS_TYPE_JPEG && frame->output_buffer != NULL) {
5215         index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
5216         if(index < 0) {
5217             rc = registerBuffer(frame->output_buffer, mIsType);
5218             if (NO_ERROR != rc) {
5219                 LOGE("On-the-fly buffer registration failed %d",
5220                          rc);
5221                 return rc;
5222             }
5223 
5224             index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
5225             if (index < 0) {
5226                 LOGE("Could not find object among registered buffers");
5227                 return DEAD_OBJECT;
5228             }
5229         }
5230         rc = mGrallocMemory.markFrameNumber(index, frame->frameNumber);
5231         if(rc != NO_ERROR) {
5232             LOGE("Failed to mark frame#:%d, index:%d",frame->frameNumber,index);
5233             return rc;
5234         }
5235         if(!m_bIsActive) {
5236             rc = start();
5237             if (NO_ERROR != rc) {
5238                 return rc;
5239             }
5240         } else {
5241             rc = pStream->bufDone(index);
5242             if(rc != NO_ERROR) {
5243                 LOGE("Failed to Q new buffer to stream %d", rc);
5244                 mGrallocMemory.markFrameNumber(index, -1);
5245                 return rc;
5246             }
5247         }
5248 
5249     } else if (mReprocessType == REPROCESS_TYPE_JPEG) {
5250         Mutex::Autolock lock(mFreeBuffersLock);
5251         uint32_t bufIdx;
5252         if (mFreeBufferList.empty()) {
5253             rc = mMemory->allocateOne(mFrameLen);
5254             if (rc < 0) {
5255                 LOGE("Failed allocating heap buffer. Fatal");
5256                 return BAD_VALUE;
5257             } else {
5258                 bufIdx = (uint32_t)rc;
5259             }
5260         } else {
5261             bufIdx = *(mFreeBufferList.begin());
5262             mFreeBufferList.erase(mFreeBufferList.begin());
5263         }
5264 
5265         mMemory->markFrameNumber(bufIdx, frame->frameNumber);
5266         rc = pStream->bufDone(bufIdx);
5267         if (rc != NO_ERROR) {
5268             LOGE("Failed to queue new buffer to stream");
5269             return rc;
5270         }
5271     }
5272 
5273     int32_t max_idx = (int32_t) (mNumBuffers - 1);
5274     //loop back the indices if max burst count reached
5275     if (mOfflineBuffersIndex == max_idx) {
5276        mOfflineBuffersIndex = -1;
5277     }
5278     uint32_t buf_idx = (uint32_t)(mOfflineBuffersIndex + 1);
5279 
5280     //Do cache ops before sending for reprocess
5281     if (mMemory != NULL) {
5282         mMemory->cleanInvalidateCache(buf_idx);
5283     }
5284 
5285     rc = pStream->mapBuf(
5286             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5287             buf_idx, -1,
5288             frame->input_buffer.fd, frame->input_buffer.buffer,
5289             frame->input_buffer.frame_len);
5290     if (NO_ERROR == rc) {
5291         Mutex::Autolock l(mOfflineBuffersLock);
5292         mappedBuffer.index = buf_idx;
5293         mappedBuffer.stream = pStream;
5294         mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
5295         mOfflineBuffers.push_back(mappedBuffer);
5296         mOfflineBuffersIndex = (int32_t)buf_idx;
5297         LOGD("Mapped buffer with index %d", mOfflineBuffersIndex);
5298     }
5299 
5300     max_idx = (int32_t) ((mNumBuffers * 2) - 1);
5301     //loop back the indices if max burst count reached
5302     if (mOfflineMetaIndex == max_idx) {
5303        mOfflineMetaIndex = (int32_t) (mNumBuffers - 1);
5304     }
5305     uint32_t meta_buf_idx = (uint32_t)(mOfflineMetaIndex + 1);
5306     rc |= pStream->mapBuf(
5307             CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
5308             meta_buf_idx, -1,
5309             frame->metadata_buffer.fd, frame->metadata_buffer.buffer,
5310             frame->metadata_buffer.frame_len);
5311     if (NO_ERROR == rc) {
5312         Mutex::Autolock l(mOfflineBuffersLock);
5313         mappedBuffer.index = meta_buf_idx;
5314         mappedBuffer.stream = pStream;
5315         mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
5316         mOfflineMetaBuffers.push_back(mappedBuffer);
5317         mOfflineMetaIndex = (int32_t)meta_buf_idx;
5318         LOGD("Mapped meta buffer with index %d", mOfflineMetaIndex);
5319     }
5320 
5321     if (rc == NO_ERROR) {
5322         cam_stream_parm_buffer_t param;
5323         uint32_t numPendingPriorityFrames = 0;
5324 
5325         if(isPriorityFrame && (mReprocessType != REPROCESS_TYPE_JPEG)) {
5326             Mutex::Autolock lock(mPriorityFramesLock);
5327             /* read the length before pushing the frame number to check if
5328              * vector is empty */
5329             numPendingPriorityFrames = mPriorityFrames.size();
5330             mPriorityFrames.push(frame->frameNumber);
5331         }
5332 
5333         if(isPriorityFrame && !numPendingPriorityFrames &&
5334             (mReprocessType != REPROCESS_TYPE_JPEG)) {
5335             memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5336             param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
5337             param.perf_mode = CAM_PERF_HIGH_PERFORMANCE;
5338             rc = pStream->setParameter(param);
5339             if (rc != NO_ERROR) {
5340                 LOGE("%s: setParameter for CAM_PERF_HIGH_PERFORMANCE failed",
5341                     __func__);
5342             }
5343             {
5344                 Mutex::Autolock lock(mPriorityFramesLock);
5345                 mReprocessPerfMode = true;
5346             }
5347         }
5348 
5349         memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5350         param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
5351         param.reprocess.buf_index = buf_idx;
5352         param.reprocess.frame_idx = frame->input_buffer.frame_idx;
5353         param.reprocess.meta_present = 1;
5354         param.reprocess.meta_buf_index = meta_buf_idx;
5355 
5356         LOGI("Offline reprocessing id = %d buf Id = %d meta index = %d",
5357                     param.reprocess.frame_idx, param.reprocess.buf_index,
5358                     param.reprocess.meta_buf_index);
5359         rc = pStream->setParameter(param);
5360         if (rc != NO_ERROR) {
5361             LOGE("stream setParameter for reprocess failed");
5362             resetToCamPerfNormal(frame->frameNumber);
5363         }
5364     } else {
5365         LOGE("Input buffer memory map failed: %d", rc);
5366     }
5367 
5368     return rc;
5369 }
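
/* Illustrative sketch (not part of the HAL source): a worked example of the
 * offline buffer index bookkeeping above. With mNumBuffers = N, input buffers
 * cycle through map indices 0..N-1 and metadata buffers through N..2N-1,
 * which is why the constructor seeds the indices with N-1 and 2N-1. Shown
 * here for a hypothetical N = 4, using the same wrap rules as above. */
#if 0
static void sketchOfflineIndexRing()
{
    const uint32_t numBuffers = 4;                    // hypothetical N
    int32_t offlineBufIndex  = numBuffers - 1;        // 3 -> first input idx 0
    int32_t offlineMetaIndex = 2 * numBuffers - 1;    // 7 -> first meta idx 4

    for (int frame = 0; frame < 6; frame++) {
        if (offlineBufIndex == (int32_t)(numBuffers - 1)) {
            offlineBufIndex = -1;                     // wrap input indices
        }
        uint32_t bufIdx = (uint32_t)(offlineBufIndex + 1);
        offlineBufIndex = (int32_t)bufIdx;

        if (offlineMetaIndex == (int32_t)(2 * numBuffers - 1)) {
            offlineMetaIndex = (int32_t)(numBuffers - 1);  // wrap meta indices
        }
        uint32_t metaIdx = (uint32_t)(offlineMetaIndex + 1);
        offlineMetaIndex = (int32_t)metaIdx;

        // frame 0 -> input 0 / meta 4, ..., frame 4 -> input 0 / meta 4 again.
        LOGD("frame %d maps to input idx %u, meta idx %u",
                frame, bufIdx, metaIdx);
    }
}
#endif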
5370 
5371 /*===========================================================================
5372  * FUNCTION   : doReprocess
5373  *
5374  * DESCRIPTION: request to do a reprocess on the frame
5375  *
5376  * PARAMETERS :
5377  *   @buf_fd     : fd to the input buffer that needs reprocess
5378  *   @buffer     : Buffer ptr
 *   @buf_length : length of the input buffer
5380  *   @ret_val    : result of reprocess.
5381  *                 Example: Could be faceID in case of register face image.
5382  *   @meta_frame : metadata frame.
5383  *
5384  * RETURN     : int32_t type of status
5385  *              NO_ERROR  -- success
 *              non-zero failure code
5387  *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, void *buffer, size_t buf_length,
5389         int32_t &ret_val, mm_camera_super_buf_t *meta_frame)
5390 {
5391     int32_t rc = 0;
5392     if (m_numStreams < 1) {
5393         LOGE("No reprocess stream is created");
5394         return -1;
5395     }
5396     if (meta_frame == NULL) {
5397         LOGE("Did not get corresponding metadata in time");
5398         return -1;
5399     }
5400 
5401     uint8_t buf_idx = 0;
5402     for (uint32_t i = 0; i < m_numStreams; i++) {
5403         rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5404                                  buf_idx, -1,
5405                                  buf_fd, buffer, buf_length);
5406 
5407         //Do cache ops before sending for reprocess
5408         if (mMemory != NULL) {
5409             mMemory->cleanInvalidateCache(buf_idx);
5410         }
5411 
5412         if (rc == NO_ERROR) {
5413             cam_stream_parm_buffer_t param;
5414             memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5415             param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
5416             param.reprocess.buf_index = buf_idx;
5417             param.reprocess.meta_present = 1;
5418             param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
5419             param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
5420 
5421             LOGI("Online reprocessing id = %d buf Id = %d meta index = %d",
5422                     param.reprocess.frame_idx, param.reprocess.buf_index,
5423                     param.reprocess.meta_buf_index);
5424             rc = mStreams[i]->setParameter(param);
5425             if (rc == NO_ERROR) {
5426                 ret_val = param.reprocess.ret_val;
5427             }
5428             mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5429                                   buf_idx, -1);
5430         }
5431     }
5432     return rc;
5433 }
5434 
5435 /*===========================================================================
5436  * FUNCTION   : addReprocStreamsFromSource
5437  *
5438  * DESCRIPTION: add reprocess streams from input source channel
5439  *
5440  * PARAMETERS :
5441  *   @config         : pp feature configuration
5442  *   @src_config     : source reprocess configuration
5443  *   @isType         : type of image stabilization required on this stream
5444  *   @pMetaChannel   : ptr to metadata channel to get corresp. metadata
5445  *
5446  *
5447  * RETURN     : int32_t type of status
5448  *              NO_ERROR  -- success
 *              non-zero failure code
5450  *==========================================================================*/
int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
5452         const reprocess_config_t &src_config , cam_is_type_t is_type,
5453         QCamera3Channel *pMetaChannel)
5454 {
    int32_t rc = 0;
    cam_stream_reproc_config_t reprocess_config;
    cam_stream_type_t streamType;

    cam_dimension_t streamDim = src_config.output_stream_dim;

    if (NULL != src_config.src_channel) {
        QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
        if (pSrcStream == NULL) {
           LOGE("source channel doesn't have a stream");
           return BAD_VALUE;
        }
        mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
    }

    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;

    reprocess_config.offline.input_fmt = src_config.input_stream_format;
    reprocess_config.offline.input_dim = src_config.input_stream_dim;
    reprocess_config.offline.input_buf_planes.plane_info =
            src_config.input_stream_plane_info.plane_info;
    reprocess_config.offline.num_of_bufs = (uint8_t)mNumBuffers;
    reprocess_config.offline.input_type = src_config.stream_type;

    LOGH("input_fmt is %d, fmt is %d, input_dim is %d x %d", reprocess_config.offline.input_fmt,
         src_config.stream_format, reprocess_config.offline.input_dim.width,
         reprocess_config.offline.input_dim.height);
    reprocess_config.pp_feature_config = pp_config;
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
            m_handle,
            m_camOps,
            &mPaddingInfo,
            (QCamera3Channel*)this,
            false/*mapStreamBuffers*/);
    if (pStream == NULL) {
        LOGE("No mem for Stream");
        return NO_MEMORY;
    }

    rc = pStream->init(streamType, src_config.stream_format,
            streamDim, ROTATE_0, &reprocess_config,
            (uint8_t)mNumBuffers,
            reprocess_config.pp_feature_config.feature_mask,
            is_type,
            0,/* batchSize */
            QCamera3Channel::streamCbRoutine, this);

    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        LOGE("failed to create reprocess stream");
        delete pStream;
    }

    if (rc == NO_ERROR) {
        m_pSrcChannel = src_config.src_channel;
        m_pMetaChannel = pMetaChannel;
        mReprocessType = src_config.reprocess_type;
        LOGD("mReprocessType is %d", mReprocessType);
    }
    mm_camera_req_buf_t buf;
    memset(&buf, 0x0, sizeof(buf));
    buf.type = MM_CAMERA_REQ_SUPER_BUF;
    buf.num_buf_requested = 1;
    if (m_camOps->request_super_buf(m_camHandle, m_handle, &buf) < 0) {
        LOGE("Request for super buffer failed");
    }
    return rc;
}
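
/*
 * Illustrative caller-side sketch for addReprocStreamsFromSource(). This is
 * not taken from this file; the caller, variable names and the specific
 * enum/mask values below are assumptions used only to show how the
 * arguments fit together.
 *
 *   reprocess_config_t src_cfg;
 *   memset(&src_cfg, 0, sizeof(src_cfg));
 *   src_cfg.src_channel       = pInputChannel;   // channel that produced the input frame
 *   src_cfg.stream_type       = CAM_STREAM_TYPE_SNAPSHOT;
 *   src_cfg.stream_format     = inputFormat;     // format of the source stream
 *   src_cfg.input_stream_dim  = inputDim;
 *   src_cfg.output_stream_dim = outputDim;
 *   src_cfg.reprocess_type    = REPROCESS_TYPE_JPEG;
 *
 *   cam_pp_feature_config_t pp_cfg;
 *   memset(&pp_cfg, 0, sizeof(pp_cfg));
 *   pp_cfg.feature_mask = ppFeatureMask;         // assumed to be derived elsewhere
 *
 *   rc = pReprocChannel->addReprocStreamsFromSource(pp_cfg, src_cfg,
 *           IS_TYPE_NONE, pMetaChannel);
 */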

/* QCamera3SupportChannel methods */

cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};

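/*===========================================================================
 * FUNCTION   : QCamera3SupportChannel
 *
 * DESCRIPTION: constructor of QCamera3SupportChannel
 *
 * PARAMETERS :
 *   @cam_handle       : camera handle
 *   @channel_handle   : channel handle
 *   @cam_ops          : ptr to camera ops table
 *   @paddingInfo      : padding information
 *   @postprocess_mask : feature mask for postprocessing
 *   @streamType       : stream type
 *   @dim              : stream dimensions
 *   @streamFormat     : stream format
 *   @hw_analysis_supported : whether hardware analysis is supported
 *   @color_arrangement : sensor color filter arrangement
 *   @userData         : user data ptr
 *   @numBuffers       : number of stream buffers
 *
 * RETURN     : none
 *==========================================================================*/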
QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
                    uint32_t channel_handle,
                    mm_camera_ops_t *cam_ops,
                    cam_padding_info_t *paddingInfo,
                    cam_feature_mask_t postprocess_mask,
                    cam_stream_type_t streamType,
                    cam_dimension_t *dim,
                    cam_format_t streamFormat,
                    uint8_t hw_analysis_supported,
                    cam_color_filter_arrangement_t color_arrangement,
                    void *userData, uint32_t numBuffers) :
                        QCamera3Channel(cam_handle, channel_handle, cam_ops,
                                NULL, NULL, paddingInfo, postprocess_mask,
                                userData, numBuffers),
                        mMemory(NULL),
                        mHwAnalysisSupported(hw_analysis_supported),
                        mColorArrangement(color_arrangement)
{
    memcpy(&mDim, dim, sizeof(cam_dimension_t));
    mStreamType = streamType;
    mStreamFormat = streamFormat;
}

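/*===========================================================================
 * FUNCTION   : ~QCamera3SupportChannel
 *
 * DESCRIPTION: destructor of QCamera3SupportChannel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/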
QCamera3SupportChannel::~QCamera3SupportChannel()
{
    destroy();

    if (mMemory) {
        mMemory->deallocate();
        delete mMemory;
        mMemory = NULL;
    }
}

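/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: initialize support channel and add its stream
 *
 * PARAMETERS :
 *   @isType : type of image stabilization required on this stream
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/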
int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
{
    int32_t rc;

    if (mMemory || m_numStreams > 0) {
        LOGE("QCamera3SupportChannel already initialized");
        return -EINVAL;
    }

    mIsType = isType;
    // Make Analysis same as Preview format
    if (!mHwAnalysisSupported && mStreamType == CAM_STREAM_TYPE_ANALYSIS &&
            mColorArrangement != CAM_FILTER_ARRANGEMENT_Y) {
        mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
                mDim.width, mDim.height, m_bUBWCenable, mIsType);
    }

    rc = QCamera3Channel::addStream(mStreamType,
            mStreamFormat, mDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM,
            mPostProcMask, mIsType);
    if (rc < 0) {
        LOGE("addStream failed");
    }
    return rc;
}

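/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: process a capture request; no-op for the support channel
 *
 * PARAMETERS :
 *   @buffer      : unused
 *   @frameNumber : unused
 *   @indexUsed   : unused
 *
 * RETURN     : NO_ERROR
 *==========================================================================*/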
int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
                                                uint32_t /*frameNumber*/,
                                                int & /*indexUsed*/)
{
    return NO_ERROR;
}

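/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: stream callback; hands the buffer back to the stream
 *
 * PARAMETERS :
 *   @super_frame : incoming super buffer
 *   @stream      : stream the buffer belongs to (unused)
 *
 * RETURN     : none
 *==========================================================================*/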
void QCamera3SupportChannel::streamCbRoutine(
                        mm_camera_super_buf_t *super_frame,
                        QCamera3Stream * /*stream*/)
{
    if (super_frame == NULL || super_frame->num_bufs != 1) {
        LOGE("super_frame is not valid");
        return;
    }
    bufDone(super_frame);
    free(super_frame);
}

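/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION: allocate heap stream buffers
 *
 * PARAMETERS :
 *   @len : buffer length
 *
 * RETURN     : ptr to QCamera3StreamMem on success, NULL on failure
 *==========================================================================*/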
QCamera3StreamMem* QCamera3SupportChannel::getStreamBufs(uint32_t len)
{
    int rc;
    mMemory = new QCamera3StreamMem(mNumBuffers);
    if (!mMemory) {
        LOGE("unable to create heap memory");
        return NULL;
    }
    rc = mMemory->allocateAll(len);
    if (rc < 0) {
        LOGE("unable to allocate heap memory");
        delete mMemory;
        mMemory = NULL;
        return NULL;
    }
    return mMemory;
}

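/*===========================================================================
 * FUNCTION   : putStreamBufs
 *
 * DESCRIPTION: release the heap stream buffers
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/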
void QCamera3SupportChannel::putStreamBufs()
{
    mMemory->deallocate();
    delete mMemory;
    mMemory = NULL;
}

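/*===========================================================================
 * FUNCTION   : ~QCamera3DepthChannel
 *
 * DESCRIPTION: destructor of QCamera3DepthChannel; unmaps all buffers
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/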
QCamera3DepthChannel::~QCamera3DepthChannel() {
    unmapAllBuffers();
}

/*===========================================================================
 * FUNCTION   : mapBuffer
 *
 * DESCRIPTION: Maps stream depth buffer
 *
 * PARAMETERS :
 *   @buffer       : Depth buffer
 *   @frameNumber  : Frame number
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::mapBuffer(buffer_handle_t *buffer,
        uint32_t frameNumber) {
    int32_t rc = NO_ERROR;

    int32_t index = mGrallocMem.getMatchBufIndex((void*)buffer);
    if (0 > index) {
        // Buffer not seen before; register it with the gralloc memory manager
        rc = mGrallocMem.registerBuffer(buffer, CAM_STREAM_TYPE_DEFAULT);
        if (NO_ERROR != rc) {
            LOGE("Buffer registration failed %d", rc);
            return rc;
        }

        index = mGrallocMem.getMatchBufIndex((void*)buffer);
        if (index < 0) {
            LOGE("Could not find object among registered buffers");
            return DEAD_OBJECT;
        }
    } else {
        LOGE("Buffer: %p is already present at index: %d!", buffer, index);
        return ALREADY_EXISTS;
    }

    // Remember which frame number this buffer belongs to
    rc = mGrallocMem.markFrameNumber((uint32_t)index, frameNumber);

    return rc;
}

/*===========================================================================
 * FUNCTION   : populateDepthData
 *
 * DESCRIPTION: Copies the incoming depth data into the respective depth buffer
 *
 * PARAMETERS :
 *   @data         : Incoming depth data
 *   @frameNumber  : Frame number of incoming depth data
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::populateDepthData(const cam_depth_data_t &data,
        uint32_t frameNumber) {
    if (nullptr == mStream) {
        LOGE("Invalid depth stream!");
        return BAD_VALUE;
    }

    ssize_t length = data.length;
    int32_t index = mGrallocMem.getBufferIndex(frameNumber);
    if (0 > index) {
        LOGE("Frame number: %u not present!", frameNumber);
        return BAD_VALUE;
    }

    void *dst = mGrallocMem.getPtr(index);
    if (nullptr == dst) {
        LOGE("Invalid mapped buffer");
        return BAD_VALUE;
    }

    camera3_jpeg_blob_t jpegHeader;
    ssize_t headerSize = sizeof jpegHeader;
    buffer_handle_t *blobBufferHandle = static_cast<buffer_handle_t *>
            (mGrallocMem.getBufferHandle(index));
    ssize_t maxBlobSize;
    if (nullptr != blobBufferHandle) {
        maxBlobSize = ((private_handle_t*)(*blobBufferHandle))->width;
    } else {
        LOGE("Couldn't query buffer handle!");
        return BAD_VALUE;
    }

    if ((length + headerSize) > maxBlobSize) {
        LOGE("Depth buffer size mismatch expected: %zd actual: %zd",
                (length + headerSize), maxBlobSize);
        return BAD_VALUE;
    }

    // Copy the depth payload to the start of the mapped blob buffer
    if (0 < length) {
        memcpy(dst, data.depth_data, length);
    }

    // Append the JPEG blob trailer at the very end of the buffer so that
    // consumers can locate the payload size
    memset(&jpegHeader, 0, headerSize);
    jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
    jpegHeader.jpeg_size = length;
    size_t jpeg_eof_offset = static_cast<size_t> (maxBlobSize - headerSize);
    uint8_t *jpegBuffer = static_cast<uint8_t *> (dst);
    uint8_t *jpegEOF = &jpegBuffer[jpeg_eof_offset];
    memcpy(jpegEOF, &jpegHeader, headerSize);

    return NO_ERROR;
}
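
/*
 * Resulting blob layout produced by populateDepthData() (illustrative
 * summary of the code above; actual sizes depend on the gralloc buffer):
 *
 *   [0 .. length-1]                        depth payload (data.depth_data)
 *   [length .. maxBlobSize-headerSize-1]   unused
 *   [maxBlobSize-headerSize .. end]        camera3_jpeg_blob_t trailer with
 *                                          jpeg_blob_id = CAMERA3_JPEG_BLOB_ID
 *                                          and jpeg_size = length
 */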

/*===========================================================================
 * FUNCTION   : getOldestFrame
 *
 * DESCRIPTION: Return oldest mapped buffer
 *
 * PARAMETERS :
 *   @frameNumber         : Sets oldest frame number if present
 *
 * RETURN     : buffer_handle_t pointer
 *              NULL in case of error
 *==========================================================================*/
buffer_handle_t *QCamera3DepthChannel::getOldestFrame(uint32_t &frameNumber) {
    uint32_t oldestIndex = UINT32_MAX;
    int32_t frameNumberResult = mGrallocMem.getOldestFrameNumber(oldestIndex);
    if (0 > frameNumberResult) {
        LOGD("Invalid frame number!");
        return nullptr;
    }
    frameNumber = static_cast<uint32_t> (frameNumberResult);

    buffer_handle_t *ret = static_cast<buffer_handle_t *>
            (mGrallocMem.getBufferHandle(oldestIndex));
    if (nullptr == ret) {
        LOGE("Invalid buffer handle!");
        return nullptr;
    }

    return ret;
}

/*===========================================================================
 * FUNCTION   : unmapBuffer
 *
 * DESCRIPTION: Unmap a single buffer
 *
 * PARAMETERS :
 *   @frameNumber         : Frame number of buffer that should get unmapped
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::unmapBuffer(uint32_t frameNumber) {
    int32_t index = mGrallocMem.getBufferIndex(frameNumber);
    if (0 > index) {
        LOGE("Frame number: %u not present!", frameNumber);
        return BAD_VALUE;
    }

    return mGrallocMem.unregisterBuffer(index);
}

/*===========================================================================
 * FUNCTION   : unmapAllBuffers
 *
 * DESCRIPTION: Unmap all mapped buffers
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::unmapAllBuffers() {
    mGrallocMem.unregisterBuffers();

    return NO_ERROR;
}
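
/*
 * Typical QCamera3DepthChannel buffer flow (illustrative summary of the
 * methods above; the ordering is inferred from this file and the caller is
 * an assumption):
 *
 *   1. mapBuffer(buffer, frameNumber)        - register the gralloc output
 *                                              buffer for a capture request.
 *   2. populateDepthData(data, frameNumber)  - copy the depth payload and
 *                                              append the JPEG blob trailer.
 *   3. getOldestFrame(frameNumber)           - fetch the oldest pending
 *                                              buffer for result dispatch.
 *   4. unmapBuffer(frameNumber)              - release a single buffer, or
 *      unmapAllBuffers()                     - release everything on teardown.
 */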

}; // namespace qcamera