/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2012, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*!
 * \file      ExynosCameraHWInterface2.cpp
 * \brief     source file for Android Camera API 2.0 HAL
 * \author    Sungjoong Kang(sj3.kang@samsung.com)
 * \date      2012/07/10
 *
 * <b>Revision History: </b>
 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   Initial Release
 *
 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   2nd Release
 *
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHAL2"
#include <sys/time.h>
#include <utils/Log.h>
#include <math.h>

#include "ExynosCameraHWInterface2.h"
#include "exynos_format.h"

namespace android {

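/* Debug helper: writes 'size' bytes from 'buf' to the file 'fname'
 * (used to dump postview/raw frames during bring-up). */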
void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
{
    int nw;
    int cnt = 0;
    uint32_t written = 0;

    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
    int fd = open(fname, O_RDWR | O_CREAT, 0644);
    if (fd < 0) {
        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
        return;
    }

    ALOGV("writing %d bytes to file [%s]", size, fname);
    while (written < size) {
        nw = ::write(fd, buf + written, size - written);
        if (nw < 0) {
            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
            break;
        }
        written += nw;
        cnt++;
    }
    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
    ::close(fd);
}

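/* Returns the bit depth (bits per pixel) for the given V4L2 pixel format,
 * or 0 if the format is not handled. */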
int get_pixel_depth(uint32_t fmt)
{
    int depth = 0;

    switch (fmt) {
    case V4L2_PIX_FMT_JPEG:
        depth = 8;
        break;

    case V4L2_PIX_FMT_NV12:
    case V4L2_PIX_FMT_NV21:
    case V4L2_PIX_FMT_YUV420:
    case V4L2_PIX_FMT_YVU420M:
    case V4L2_PIX_FMT_NV12M:
    case V4L2_PIX_FMT_NV12MT:
        depth = 12;
        break;

    case V4L2_PIX_FMT_RGB565:
    case V4L2_PIX_FMT_YUYV:
    case V4L2_PIX_FMT_YVYU:
    case V4L2_PIX_FMT_UYVY:
    case V4L2_PIX_FMT_VYUY:
    case V4L2_PIX_FMT_NV16:
    case V4L2_PIX_FMT_NV61:
    case V4L2_PIX_FMT_YUV422P:
    case V4L2_PIX_FMT_SBGGR10:
    case V4L2_PIX_FMT_SBGGR12:
    case V4L2_PIX_FMT_SBGGR16:
        depth = 16;
        break;

    case V4L2_PIX_FMT_RGB32:
        depth = 32;
        break;
    default:
        ALOGE("Get depth failed(format : %d)", fmt);
        break;
    }

    return depth;
}

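/* Thin wrappers around the exynos_v4l2 ioctl helpers (S_FMT, REQBUFS, QBUF,
 * DQBUF, STREAMON/STREAMOFF, S_INPUT) operating on a node_info_t that
 * describes one multi-plane video node of the ISP chain. */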
int cam_int_s_fmt(node_info_t *node)
{
    struct v4l2_format v4l2_fmt;
    unsigned int framesize;
    int ret;

    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));

    v4l2_fmt.type = node->type;
    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;

    if (node->planes >= 1) {
        v4l2_fmt.fmt.pix_mp.width       = node->width;
        v4l2_fmt.fmt.pix_mp.height      = node->height;
        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
    } else {
        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
    }

    /* Set up for capture */
    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);

    if (ret < 0)
        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);


    return ret;
}

int cam_int_reqbufs(node_info_t *node)
{
    struct v4l2_requestbuffers req;
    int ret;

    req.count = node->buffers;
    req.type = node->type;
    req.memory = node->memory;

    ret = exynos_v4l2_reqbufs(node->fd, &req);

    if (ret < 0)
        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);

    return req.count;
}

int cam_int_qbuf(node_info_t *node, int index)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int i;
    int ret = 0;

    v4l2_buf.m.planes   = planes;
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.index      = index;
    v4l2_buf.length     = node->planes;

    for(i = 0; i < node->planes; i++){
        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
    }

    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);

    if (ret < 0)
        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);

    return ret;
}

int cam_int_streamon(node_info_t *node)
{
    enum v4l2_buf_type type = node->type;
    int ret;


    ret = exynos_v4l2_streamon(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);

    ALOGV("On streaming I/O... ... fd(%d)", node->fd);

    return ret;
}

int cam_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int ret;


    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);

    return ret;
}

int isp_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);

    return ret;
}

int cam_int_dqbuf(node_info_t *node)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = node->planes;

    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    if (ret < 0)
        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);

    return v4l2_buf.index;
}

int cam_int_dqbuf(node_info_t *node, int num_plane)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = num_plane;

    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    if (ret < 0)
        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);

    return v4l2_buf.index;
}

int cam_int_s_input(node_info_t *node, int index)
{
    int ret;

    ret = exynos_v4l2_s_input(node->fd, index);
    if (ret < 0)
        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);

    return ret;
}


gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;

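/* RequestManager keeps service requests in a fixed-size circular buffer of
 * NUM_MAX_REQUEST_MGR_ENTRY entries. Each entry tracks the original request
 * metadata, the converted internal shot, the per-frame status
 * (EMPTY -> REGISTERED -> REQUESTED -> CAPTURED -> METADONE -> COMPLETED)
 * and the number of output streams still pending for that frame. */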
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_vdisEnable(false),
    m_lastCompletedFrameCnt(-1),
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_lastAeComp(0),
    m_vdisBubbleEn(false)
{
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    ResetEntry();
    m_sensorPipelineSkipCnt = 0;
    return;
}

RequestManager::~RequestManager()
{
    ALOGV("%s", __FUNCTION__);
    if (m_metadataConverter != NULL) {
        delete m_metadataConverter;
        m_metadataConverter = NULL;
    }

    releaseSensorQ();
    return;
}

void RequestManager::ResetEntry()
{
    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);
    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
    }
    m_numOfEntries = 0;
    m_entryInsertionIndex = -1;
    m_entryProcessingIndex = -1;
    m_entryFrameOutputIndex = -1;
}

int RequestManager::GetNumEntries()
{
    Mutex::Autolock lock(m_numOfEntriesLock);
    return m_numOfEntries;
}

void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}

bool RequestManager::IsRequestQueueFull()
{
    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);
    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
        return true;
    else
        return false;
}

void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);

    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );


    newEntry = &(entries[newInsertionIndex]);

    if (newEntry->status!=EMPTY) {
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    newEntry->output_stream_count = 0;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
        newEntry->output_stream_count++;

    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
        newEntry->output_stream_count++;

    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;


    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
    afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
    afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
    afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}

void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    int frame_index;
    request_manager_entry * currentEntry;

    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);

    frame_index = GetCompletedIndex();
    currentEntry =  &(entries[frame_index]);
    if (currentEntry->status != COMPLETED) {
        CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
                       m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
        return;
    }
    if (deregistered_request)  *deregistered_request = currentEntry->original_request;

    m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;

    currentEntry->status = EMPTY;
    currentEntry->original_request = NULL;
    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
    currentEntry->output_stream_count = 0;
    m_numOfEntries--;
    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);

    CheckCompleted(GetNextIndex(frame_index));
    return;
}

bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
                camera_metadata_t ** prepared_frame, int afState)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);
    status_t res = NO_ERROR;
    int tempFrameOutputIndex = GetCompletedIndex();
    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);

    if (currentEntry->status != COMPLETED) {
        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));

        return false;
    }
    m_entryFrameOutputIndex = tempFrameOutputIndex;
    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
                m_tempFrameMetadata);
    if (res!=NO_ERROR) {
        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
        return false;
    }
    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
    *prepared_frame = m_tempFrameMetadata;
    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
        currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    // Dump();
    return true;
}

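/* Picks the next REGISTERED entry, marks it REQUESTED and fills the
 * camera2_shot_ext metadata plane of the given sensor buffer with the
 * controls for that frame. Returns the new processing index, or -1 on
 * request underrun or circular-buffer inconsistency. */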
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
{
    struct camera2_shot_ext * shot_ext;
    struct camera2_shot_ext * request_shot;
    int targetStreamIndex = 0;
    request_manager_entry * newEntry = NULL;
    static int count = 0;

    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);
    if (m_numOfEntries == 0)  {
        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);
    request_shot = &(newEntry->internal_shot);
    if (newEntry->status != REGISTERED) {
        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
                CAM_LOGD("DBG: entries[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
        }
        return -1;
    }

    newEntry->status = REQUESTED;

    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];

    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;

    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;

    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    if (count == 0){
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    } else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;

    count++;
    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
    shot_ext->shot.ctl.sensor.sensitivity = 0;


    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];

    m_entryProcessingIndex = newProcessingIndex;
    return newProcessingIndex;
}

void RequestManager::NotifyStreamOutput(int frameCnt)
{
    int index;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }
    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);

    entries[index].output_stream_count--;  //TODO : match stream id also
    CheckCompleted(index);
}

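/* Marks an entry COMPLETED once its metadata is done and all of its output
 * streams have been delivered; expects m_requestMutex to be held by the
 * caller. Signals the main thread only when the completed frame is the one
 * directly following the last delivered frame. */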
void RequestManager::CheckCompleted(int index)
{
    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
        && (entries[index].output_stream_count <= 0)){
        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
                index, entries[index].internal_shot.shot.ctl.request.frameCount );
        entries[index].status = COMPLETED;
        if (m_lastCompletedFrameCnt + 1 == (int)entries[index].internal_shot.shot.ctl.request.frameCount)
            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    }
}

int RequestManager::GetCompletedIndex()
{
    return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
}

void  RequestManager::pushSensorQ(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_sensorQ.push_back(index);
}

int RequestManager::popSensorQ()
{
   List<int>::iterator sensor_token;
   int index;

    Mutex::Autolock lock(m_requestMutex);

    if(m_sensorQ.size() == 0)
        return -1;

    sensor_token = m_sensorQ.begin()++;
    index = *sensor_token;
    m_sensorQ.erase(sensor_token);

    return (index);
}

void RequestManager::releaseSensorQ()
{
    List<int>::iterator r;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());

    while(m_sensorQ.size() > 0){
        r  = m_sensorQ.begin()++;
        m_sensorQ.erase(r);
    }
    return;
}

void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
    int index;
    struct camera2_shot_ext * request_shot;
    nsecs_t timeStamp;
    int i;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if (entries[i].internal_shot.shot.ctl.request.frameCount
                == shot_ext->shot.ctl.request.frameCount) {
            if (entries[i].status == CAPTURED) {
                entries[i].status = METADONE;
                break;
            }
            if (entries[i].status == METADONE) {
                return;
            }
        }
    }

    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        return;
    }

    request_manager_entry * newEntry = &(entries[i]);
    request_shot = &(newEntry->internal_shot);

    timeStamp = request_shot->shot.dm.sensor.timeStamp;
    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
    request_shot->shot.dm.sensor.timeStamp = timeStamp;
    m_lastTimeStamp = timeStamp;
    CheckCompleted(i);
}

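/* Copies the stored controls for 'frameCnt' into the shot that is about to be
 * queued to the ISP, then applies the HAL-side overrides: flash UI mode,
 * AE/AWB lock, suppression of unchanged 3A modes, VDIS/DNR bypass and the
 * SCC/SCP output-stream request flags. */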
void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
{
    int index, targetStreamIndex;
    struct camera2_shot_ext * request_shot;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    request_shot = &(newEntry->internal_shot);
    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
    shot_ext->shot.ctl.request.frameCount = frameCnt;
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->drc_bypass = 1;
    shot_ext->setfile = 0;

    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;

    shot_ext->isReprocessing = request_shot->isReprocessing;
    shot_ext->reprocessInput = request_shot->reprocessInput;
    shot_ext->shot.ctl.request.outputStreams[0] = 0;

    shot_ext->awb_mode_dm = request_shot->awb_mode_dm;

    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];

    // mapping flash UI mode from aeMode
    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
        if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
    }

    // Apply ae/awb lock or unlock
    if (request_shot->ae_lock == AEMODE_LOCK_ON)
            request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
            request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;

    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    }
    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    }
    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    }
    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    }
    else {
        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    }

    if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) {
        m_vdisBubbleEn = true;
        shot_ext->dis_bypass = 0;
        shot_ext->dnr_bypass = 0;
    } else {
        m_vdisBubbleEn = false;
        shot_ext->dis_bypass = 1;
        shot_ext->dnr_bypass = 1;
    }

    shot_ext->shot.ctl.aa.afTrigger = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;

    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;

    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];

    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
    (int)(shot_ext->shot.ctl.aa.afMode));
}

bool    RequestManager::IsVdisEnable(void)
{
        return m_vdisBubbleEn;
}

int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
{
    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if ((int)entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
            return i;
    }
    return -1;
}

void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    if (currentEntry->internal_shot.isReprocessing == 1) {
        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    } else {
        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
            index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    }
}


nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
        return m_lastTimeStamp;
    }
    else
        return GetTimestamp(index);
}

nsecs_t  RequestManager::GetTimestamp(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
    if (frameTime == 0) {
        ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
        frameTime = m_lastTimeStamp;
    }
    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
    return frameTime;
}

uint8_t  RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return 0;
    }
    else
        return GetOutputStream(index);
}

uint8_t  RequestManager::GetOutputStream(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
}

camera2_shot_ext *  RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return 0;
    }
    else
        return GetInternalShotExt(index);
}

camera2_shot_ext *  RequestManager::GetInternalShotExt(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    return &currentEntry->internal_shot;
}

int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext, bool drain)
{
    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);
    int i;

    if (m_numOfEntries == 0) {
        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
        return -1;
    }

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
            continue;

        if (entries[i].status == REQUESTED) {
            entries[i].status = CAPTURED;
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        if (drain && (entries[i].status >= CAPTURED)) {
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);

    }
    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    return -1;
}

void     RequestManager::SetInitialSkip(int count)
{
    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
    if (count > m_sensorPipelineSkipCnt)
        m_sensorPipelineSkipCnt = count;
}

int     RequestManager::GetSkipCnt()
{
    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
    if (m_sensorPipelineSkipCnt == 0)
        return m_sensorPipelineSkipCnt;
    else
        return --m_sensorPipelineSkipCnt;
}

void RequestManager::Dump(void)
{
    int i = 0;
    request_manager_entry * currentEntry;
    Mutex::Autolock lock(m_numOfEntriesLock);
    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        currentEntry =  &(entries[i]);
        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
            currentEntry->output_stream_count,
            currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
    }
}

int     RequestManager::GetNextIndex(int index)
{
    index++;
    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
        index = 0;

    return index;
}

int     RequestManager::GetPrevIndex(int index)
{
    index--;
    if (index < 0)
        index = NUM_MAX_REQUEST_MGR_ENTRY-1;

    return index;
}

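/* HAL device construction: loads the gralloc module, creates an ion client,
 * builds the RequestManager and the main/sensor threads, sets up the
 * GSC-based colour-space converters and default 3A/flash state, and
 * initializes the ISP chain. If ISP-chain initialization reports failure,
 * the opened video nodes are closed again. */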
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
            m_requestQueueOps(NULL),
            m_frameQueueOps(NULL),
            m_callbackCookie(NULL),
            m_numOfRemainingReqInSvc(0),
            m_isRequestQueuePending(false),
            m_isRequestQueueNull(true),
            m_halDevice(dev),
            m_ionCameraClient(0),
            m_isIspStarted(false),
            m_sccLocalBufferValid(false),
            m_cameraId(cameraId),
            m_scp_closing(false),
            m_scp_closed(false),
            m_wideAspect(false),
            m_zoomRatio(1),
            m_vdisBubbleCnt(0),
            m_vdisDupFrame(0),
            m_jpegEncodingCount(0),
            m_scpForceSuspended(false),
            m_afState(HAL_AFSTATE_INACTIVE),
            m_afTriggerId(0),
            m_afMode(NO_CHANGE),
            m_afMode2(NO_CHANGE),
            m_IsAfModeUpdateRequired(false),
            m_IsAfTriggerRequired(false),
            m_IsAfLockRequired(false),
            m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
            m_afPendingTriggerId(0),
            m_afModeWaitingCnt(0),
            m_scpOutputSignalCnt(0),
            m_scpOutputImageCnt(0),
            m_nightCaptureCnt(0),
            m_nightCaptureFrameCnt(0),
            m_lastSceneMode(0),
            m_thumbNailW(160),
            m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        m_sensorThread  = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);

        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_setExifFixedAttribute();

        // control information clear
        // flash
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}

ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    this->release();
    ALOGD("(%s): EXIT", __FUNCTION__);
}

void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");

    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}

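/* Opens the ISP chain video nodes (sensor, ISP, ScalerC/capture and
 * ScalerP/preview), prepares the dummy shot metadata, allocates and queues
 * the bayer buffers shared between the sensor and ISP nodes, and starts
 * sensor streaming. The capture (SCC) node is only configured here; it is
 * started later by StartSCCThread(). */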
int ExynosCameraHWInterface2::InitializeISPChain()
{
    char node_name[30];
    int fd = 0;
    int i;
    int ret = 0;

    /* Open Sensor */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.sensor.fd = fd;

    /* Open ISP */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.isp.fd = fd;

    /* Open ScalerC */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.capture.fd = fd;

    /* Open ScalerP */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.scp.fd = fd;

    if(m_cameraId == 0)
        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
    else
        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;

    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;

    m_camera_info.dummy_shot.dis_bypass = 1;
    m_camera_info.dummy_shot.dnr_bypass = 1;
    m_camera_info.dummy_shot.fd_bypass = 1;

    /*sensor setting*/
    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;

    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;

    /*request setting*/
    m_camera_info.dummy_shot.request_sensor = 1;
    m_camera_info.dummy_shot.request_scc = 0;
    m_camera_info.dummy_shot.request_scp = 0;
    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;

    m_camera_info.sensor.width = m_camera2->getSensorRawW();
    m_camera_info.sensor.height = m_camera2->getSensorRawH();

    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
    m_camera_info.sensor.planes = 2;
    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;

    for(i = 0; i < m_camera_info.sensor.buffers; i++){
        int res;
        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: the driver uses 8*1024; should use a predefined value
        res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
        if (res) {
            ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
            // Free allocated sensor buffers
            for (int j = 0; j < i; j++) {
                freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
            }
            return false;
        }
    }

    m_camera_info.isp.width = m_camera_info.sensor.width;
    m_camera_info.isp.height = m_camera_info.sensor.height;
    m_camera_info.isp.format = m_camera_info.sensor.format;
    m_camera_info.isp.planes = m_camera_info.sensor.planes;
    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;

    for(i = 0; i < m_camera_info.isp.buffers; i++){
        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
    };

    /* init ISP */
    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
    if (ret < 0) {
        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
        return false;
    }
    cam_int_s_fmt(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);

    /* init Sensor */
    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
    }
    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.sensor));
    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                sizeof(struct camera2_shot_ext));
    }

    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
        cam_int_qbuf(&(m_camera_info.sensor), i);

    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
        m_requestManager->pushSensorQ(i);

    ALOGV("== stream_on :: sensor");
    cam_int_streamon(&(m_camera_info.sensor));
    m_camera_info.sensor.status = true;

    /* init Capture */
    m_camera_info.capture.width = m_camera2->getSensorW();
    m_camera_info.capture.height = m_camera2->getSensorH();
    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
#ifdef ENABLE_FRAME_SYNC
    m_camera_info.capture.planes = 2;
#else
    m_camera_info.capture.planes = 1;
#endif
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;

    m_camera_info.capture.status = false;

    return true;
}

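/* Creates (or reuses) stream thread 1 for the SCC (capture) path: allocates
 * the local SCC buffers if needed, configures and queues them on the capture
 * node, and starts streaming on it. */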
StartSCCThread(bool threadExists)1389 void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1390 {
1391     ALOGV("(%s)", __FUNCTION__);
1392     StreamThread *AllocatedStream;
1393     stream_parameters_t newParameters;
1394     uint32_t format_actual;
1395 
1396 
1397     if (!threadExists) {
1398         m_streamThreads[1]  = new StreamThread(this, 1);
1399     }
1400     AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1401     if (!threadExists) {
1402         AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1403         m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1404         AllocatedStream->m_numRegisteredStream = 1;
1405     }
1406     AllocatedStream->m_index        = 1;
1407 
1408     format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1409 
1410     newParameters.width             = m_camera2->getSensorW();
1411     newParameters.height            = m_camera2->getSensorH();
1412     newParameters.format            = format_actual;
1413     newParameters.streamOps         = NULL;
1414     newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
1415 #ifdef ENABLE_FRAME_SYNC
1416     newParameters.planes            = 2;
1417 #else
1418     newParameters.planes            = 1;
1419 #endif
1420 
1421     newParameters.numSvcBufsInHal   = 0;
1422 
1423     newParameters.node              = &m_camera_info.capture;
1424 
1425     AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1426     ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1427 
1428     if (!threadExists) {
1429         if (!m_sccLocalBufferValid) {
1430             for (int i = 0; i < m_camera_info.capture.buffers; i++){
1431                 initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1432                 m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1433 #ifdef ENABLE_FRAME_SYNC
1434                 m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: driver uses 4*1024; this should use a predefined constant
1435                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1436 #else
1437                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1438 #endif
1439                 m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
1440             }
1441             m_sccLocalBufferValid = true;
1442         }
1443     } else {
1444         if (m_sccLocalBufferValid) {
1445              for (int i = 0; i < m_camera_info.capture.buffers; i++)
1446                 m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
1447         } else {
1448             ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
1449         }
1450     }
1451     cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1452     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1453     cam_int_s_fmt(newParameters.node);
1454     ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1455     cam_int_reqbufs(newParameters.node);
1456     ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1457 
1458     for (int i = 0; i < newParameters.node->buffers; i++) {
1459         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1460         cam_int_qbuf(newParameters.node, i);
1461         newParameters.svcBufStatus[i] = ON_DRIVER;
1462     }
1463 
1464     ALOGV("== stream_on :: capture");
1465     if (cam_int_streamon(newParameters.node) < 0) {
1466         ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1467     } else {
1468         m_camera_info.capture.status = true;
1469     }
1470 
1471     AllocatedStream->setParameter(&newParameters);
1472     AllocatedStream->m_activated    = true;
1473     AllocatedStream->m_isBufferInit = true;
1474 }
1475 
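/*
 * Turns on streaming for the ISP output node and then issues
 * V4L2_CID_IS_S_STREAM (IS_ENABLE_STREAM) on the sensor node to start the
 * IS stream.
 */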
1476 void ExynosCameraHWInterface2::StartISP()
1477 {
1478     ALOGV("== stream_on :: isp");
1479     cam_int_streamon(&(m_camera_info.isp));
1480     exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1481 }
1482 
1483 int ExynosCameraHWInterface2::getCameraId() const
1484 {
1485     return m_cameraId;
1486 }
1487 
1488 int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1489 {
1490     ALOGV("DEBUG(%s):", __FUNCTION__);
1491     if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1492             && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1493         m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1494         return 0;
1495     }
1496     else {
1497         ALOGE("ERR(%s): setRequestQueueSrcOps: NULL arguments", __FUNCTION__);
1498         return 1;
1499     }
1500 }
1501 
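/*
 * Framework callback invoked when the request queue becomes non-empty.
 * If the pipeline is idle it lazily (re)initializes the ISP, sensor and
 * capture nodes, restarts stream thread 1 when necessary, starts the ISP
 * and the sensor thread, and finally wakes the main thread with
 * SIGNAL_MAIN_REQ_Q_NOT_EMPTY so request processing resumes.
 */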
1502 int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1503 {
1504     int i = 0;
1505 
1506     ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1507     if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1508         ALOGE("ERR(%s): queue ops are NULL, ignoring request", __FUNCTION__);
1509         return 0;
1510     }
1511     m_isRequestQueueNull = false;
1512     if (m_requestManager->GetNumEntries() == 0)
1513         m_requestManager->SetInitialSkip(0);
1514 
1515     if (m_isIspStarted == false) {
1516         /* isp */
1517         m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1518         m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1519         cam_int_s_fmt(&(m_camera_info.isp));
1520         cam_int_reqbufs(&(m_camera_info.isp));
1521 
1522         /* sensor */
1523         if (m_camera_info.sensor.status == false) {
1524             cam_int_s_fmt(&(m_camera_info.sensor));
1525             cam_int_reqbufs(&(m_camera_info.sensor));
1526 
1527             for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1528                 ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1529                 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1530                 m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1531                 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1532                         sizeof(struct camera2_shot_ext));
1533             }
1534             for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1535                 cam_int_qbuf(&(m_camera_info.sensor), i);
1536 
1537             for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1538                 m_requestManager->pushSensorQ(i);
1539             ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1540             cam_int_streamon(&(m_camera_info.sensor));
1541             m_camera_info.sensor.status = true;
1542         }
1543     }
1544     if (!(m_streamThreads[1].get())) {
1545         ALOGV("DEBUG(%s): stream thread 1 does not exist. starting without a stream", __FUNCTION__);
1546         StartSCCThread(false);
1547     } else {
1548         if (m_streamThreads[1]->m_activated ==  false) {
1549             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1550             StartSCCThread(true);
1551         } else {
1552             if (m_camera_info.capture.status == false) {
1553                 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1554                 cam_int_s_fmt(&(m_camera_info.capture));
1555                 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1556                 cam_int_reqbufs(&(m_camera_info.capture));
1557                 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1558 
1559                 if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
1560                     StreamThread *          targetStream = m_streamThreads[1].get();
1561                     stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
1562                     node_info_t             *currentNode = targetStreamParms->node;
1563 
1564                     struct v4l2_buffer v4l2_buf;
1565                     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1566 
1567                     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1568                         v4l2_buf.m.planes   = planes;
1569                         v4l2_buf.type       = currentNode->type;
1570                         v4l2_buf.memory     = currentNode->memory;
1571 
1572                         v4l2_buf.length     = currentNode->planes;
1573                         v4l2_buf.index      = i;
1574                         ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
1575 
1576                         if (i < currentNode->buffers) {
1577 #ifdef ENABLE_FRAME_SYNC
1578                             v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
1579                             v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
1580                             v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
1581                             v4l2_buf.length += targetStreamParms->metaPlanes;
1582                             v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1583                             v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1584 
1585                             ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1586 #endif
1587                             if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1588                                 ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
1589                             }
1590                             ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
1591                             targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1592                         }
1593                         else {
1594                             targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1595                         }
1596 
1597                     }
1598 
1599                 } else {
1600                     for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1601                         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1602                         cam_int_qbuf(&(m_camera_info.capture), i);
1603                     }
1604                 }
1605                 ALOGV("== stream_on :: capture");
1606                 if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1607                     ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1608                 } else {
1609                     m_camera_info.capture.status = true;
1610                 }
1611             }
1612             if (m_scpForceSuspended) {
1613                 m_scpForceSuspended = false;
1614             }
1615         }
1616     }
1617     if (m_isIspStarted == false) {
1618         StartISP();
1619         ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1620         m_requestManager->SetInitialSkip(6);
1621         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1622         m_isIspStarted = true;
1623     }
1624     m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1625     return 0;
1626 }
1627 
1628 int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1629 {
1630     ALOGV("DEBUG(%s):", __FUNCTION__);
1631     if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1632             && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1633         m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1634         return 0;
1635     }
1636     else {
1637         ALOGE("ERR(%s): setFrameQueueDstOps: NULL arguments", __FUNCTION__);
1638         return 1;
1639     }
1640 }
1641 
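/*
 * Reports the number of requests the HAL still owns plus the number of
 * JPEG encodes in flight, sampled under m_jpegEncoderLock.
 */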
1642 int ExynosCameraHWInterface2::getInProgressCount()
1643 {
1644     int inProgressJpeg;
1645     int inProgressCount;
1646 
1647     {
1648         Mutex::Autolock lock(m_jpegEncoderLock);
1649         inProgressJpeg = m_jpegEncodingCount;
1650         inProgressCount = m_requestManager->GetNumEntries();
1651     }
1652     ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
1653         inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
1654     return (inProgressCount + inProgressJpeg);
1655 }
1656 
1657 int ExynosCameraHWInterface2::flushCapturesInProgress()
1658 {
1659     return 0;
1660 }
1661 
1662 int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1663 {
1664     ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1665 
1666     if (request == NULL) return BAD_VALUE;
1667     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1668         return BAD_VALUE;
1669     }
1670     status_t res;
1671     // Pass 1, calculate size and allocate
1672     res = m_camera2->constructDefaultRequest(request_template,
1673             request,
1674             true);
1675     if (res != OK) {
1676         return res;
1677     }
1678     // Pass 2, build request
1679     res = m_camera2->constructDefaultRequest(request_template,
1680             request,
1681             false);
1682     if (res != OK) {
1683         ALOGE("Unable to populate new request for template %d",
1684                 request_template);
1685     }
1686 
1687     return res;
1688 }
1689 
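/*
 * Allocates a stream for the requested size/format and reports the actual
 * pixel format, gralloc usage and buffer count back to the framework:
 *  - IMPLEMENTATION_DEFINED / OPAQUE : STREAM_ID_PREVIEW on stream thread 0
 *    (SCP node), or STREAM_ID_RECORD as a substream when preview is already
 *    active;
 *  - ZSL at full sensor resolution   : STREAM_ID_ZSL on stream thread 1
 *    (capture node), reconfiguring or recreating that thread as needed;
 *  - BLOB                            : STREAM_ID_JPEG substream of stream
 *    thread 1, starting the SCC thread on demand;
 *  - YCrCb_420_SP / YV12             : STREAM_ID_PRVCB substream of preview.
 * m_wideAspect is derived from a fixed list of wide output resolutions.
 */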
1690 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1691                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1692 {
1693     ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1694     bool useDirectOutput = false;
1695     StreamThread *AllocatedStream;
1696     stream_parameters_t newParameters;
1697     substream_parameters_t *subParameters;
1698     StreamThread *parentStream;
1699     status_t res;
1700     int allocCase = 0;
1701 
1702     if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1703             m_camera2->isSupportedResolution(width, height)) {
1704         if (!(m_streamThreads[0].get())) {
1705             ALOGV("DEBUG(%s): stream 0 does not exist", __FUNCTION__);
1706             allocCase = 0;
1707         }
1708         else {
1709             if ((m_streamThreads[0].get())->m_activated == true) {
1710                 ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1711                 allocCase = 1;
1712             }
1713             else {
1714                 ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1715                 allocCase = 2;
1716             }
1717         }
1718 
1719         // TODO: compute the aspect ratio instead and select the mode based on the calculated ratio.
1720         if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1721                     || (width == 720 && height == 480) || (width == 1440 && height == 960)
1722                     || (width == 1344 && height == 896)) {
1723             m_wideAspect = true;
1724         } else {
1725             m_wideAspect = false;
1726         }
1727         ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1728 
1729         if (allocCase == 0 || allocCase == 2) {
1730             *stream_id = STREAM_ID_PREVIEW;
1731 
1732             m_streamThreads[0]  = new StreamThread(this, *stream_id);
1733 
1734             AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1735             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1736             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1737 
1738             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1739             *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1740             if (m_wideAspect)
1741                 *usage                         |= GRALLOC_USAGE_PRIVATE_CHROMA;
1742             *max_buffers                        = 7;
1743 
1744             newParameters.width                 = width;
1745             newParameters.height                = height;
1746             newParameters.format                = *format_actual;
1747             newParameters.streamOps             = stream_ops;
1748             newParameters.usage                 = *usage;
1749             newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1750             newParameters.numOwnSvcBuffers      = *max_buffers;
1751             newParameters.planes                = NUM_PLANES(*format_actual);
1752             newParameters.metaPlanes            = 1;
1753             newParameters.numSvcBufsInHal       = 0;
1754             newParameters.minUndequedBuffer     = 3;
1755             newParameters.needsIonMap           = true;
1756 
1757             newParameters.node                  = &m_camera_info.scp;
1758             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1759             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1760 
1761             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1762             AllocatedStream->m_index            = 0;
1763             AllocatedStream->setParameter(&newParameters);
1764             AllocatedStream->m_activated = true;
1765             AllocatedStream->m_numRegisteredStream = 1;
1766             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1767             m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1768             m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1769             if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1770                 AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1771             if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1772                 AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1773 
1774             // set video stabilization killswitch
1775             m_requestManager->m_vdisEnable = width > 352 && height > 288;
1776 
1777             return 0;
1778         } else if (allocCase == 1) {
1779             *stream_id = STREAM_ID_RECORD;
1780 
1781             subParameters = &m_subStreams[STREAM_ID_RECORD];
1782             memset(subParameters, 0, sizeof(substream_parameters_t));
1783 
1784             parentStream = (StreamThread*)(m_streamThreads[0].get());
1785             if (!parentStream) {
1786                 return 1;
1787             }
1788 
1789             *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1790             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1791             if (m_wideAspect)
1792                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1793             *max_buffers = 7;
1794 
1795             subParameters->type         = SUBSTREAM_TYPE_RECORD;
1796             subParameters->width        = width;
1797             subParameters->height       = height;
1798             subParameters->format       = *format_actual;
1799             subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1800             subParameters->streamOps     = stream_ops;
1801             subParameters->usage         = *usage;
1802             subParameters->numOwnSvcBuffers = *max_buffers;
1803             subParameters->numSvcBufsInHal  = 0;
1804             subParameters->needBufferInit    = false;
1805             subParameters->minUndequedBuffer = 2;
1806 
1807             res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1808             if (res != NO_ERROR) {
1809                 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1810                 return 1;
1811             }
1812             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1813             ALOGV("(%s): Enabling Record", __FUNCTION__);
1814             return 0;
1815         }
1816     }
1817     else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1818             && ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) {
1819 
1820         if (!(m_streamThreads[1].get())) {
1821             ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1822             useDirectOutput = true;
1823         }
1824         else {
1825             ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1826             useDirectOutput = false;
1827         }
1828         if (useDirectOutput) {
1829             *stream_id = STREAM_ID_ZSL;
1830 
1831             m_streamThreads[1]  = new StreamThread(this, *stream_id);
1832             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1833             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1834             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1835 
1836             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1839             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1840             if (m_wideAspect)
1841                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1842             *max_buffers = 7;
1843 
1844             newParameters.width                 = width;
1845             newParameters.height                = height;
1846             newParameters.format                = *format_actual;
1847             newParameters.streamOps             = stream_ops;
1848             newParameters.usage                 = *usage;
1849             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1850             newParameters.numOwnSvcBuffers      = *max_buffers;
1851             newParameters.planes                = NUM_PLANES(*format_actual);
1852             newParameters.metaPlanes            = 1;
1853 
1854             newParameters.numSvcBufsInHal       = 0;
1855             newParameters.minUndequedBuffer     = 2;
1856             newParameters.needsIonMap           = false;
1857 
1858             newParameters.node                  = &m_camera_info.capture;
1859             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1860             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1861 
1862             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1863             AllocatedStream->m_index            = 1;
1864             AllocatedStream->setParameter(&newParameters);
1865             AllocatedStream->m_activated = true;
1866             AllocatedStream->m_numRegisteredStream = 1;
1867             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1868             return 0;
1869         } else {
1870             bool bJpegExists = false;
1871             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1872             subParameters = &m_subStreams[STREAM_ID_JPEG];
1873             if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1874                 ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1875                 bJpegExists = true;
1876                 AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1877             }
1878             AllocatedStream->m_releasing = true;
1879             ALOGD("START stream thread 1 release %d", __LINE__);
1880             do {
1881                 AllocatedStream->release();
1882                 usleep(SIG_WAITING_TICK);
1883             } while (AllocatedStream->m_releasing);
1884             ALOGD("END   stream thread 1 release %d", __LINE__);
1885 
1886             *stream_id = STREAM_ID_ZSL;
1887 
1888             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1889 
1890             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1893             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1894             if (m_wideAspect)
1895                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1896             *max_buffers = 7;
1897 
1898             newParameters.width                 = width;
1899             newParameters.height                = height;
1900             newParameters.format                = *format_actual;
1901             newParameters.streamOps             = stream_ops;
1902             newParameters.usage                 = *usage;
1903             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1904             newParameters.numOwnSvcBuffers      = *max_buffers;
1905             newParameters.planes                = NUM_PLANES(*format_actual);
1906             newParameters.metaPlanes            = 1;
1907 
1908             newParameters.numSvcBufsInHal       = 0;
1909             newParameters.minUndequedBuffer     = 2;
1910             newParameters.needsIonMap           = false;
1911 
1912             newParameters.node                  = &m_camera_info.capture;
1913             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1914             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1915 
1916             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1917             AllocatedStream->m_index            = 1;
1918             AllocatedStream->setParameter(&newParameters);
1919             AllocatedStream->m_activated = true;
1920             AllocatedStream->m_numRegisteredStream = 1;
1921             if (bJpegExists) {
1922                 AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1923             }
1924             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1925             return 0;
1926 
1927         }
1928     }
1929     else if (format == HAL_PIXEL_FORMAT_BLOB
1930             && m_camera2->isSupportedJpegResolution(width, height)) {
1931         *stream_id = STREAM_ID_JPEG;
1932 
1933         subParameters = &m_subStreams[*stream_id];
1934         memset(subParameters, 0, sizeof(substream_parameters_t));
1935 
1936         if (!(m_streamThreads[1].get())) {
1937             ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1938             StartSCCThread(false);
1939         }
1940         else if (m_streamThreads[1]->m_activated ==  false) {
1941             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1942             StartSCCThread(true);
1943         }
1944         parentStream = (StreamThread*)(m_streamThreads[1].get());
1945 
1946         *format_actual = HAL_PIXEL_FORMAT_BLOB;
1947         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1948         if (m_wideAspect)
1949             *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1950         *max_buffers = 5;
1951 
1952         subParameters->type          = SUBSTREAM_TYPE_JPEG;
1953         subParameters->width         = width;
1954         subParameters->height        = height;
1955         subParameters->format        = *format_actual;
1956         subParameters->svcPlanes     = 1;
1957         subParameters->streamOps     = stream_ops;
1958         subParameters->usage         = *usage;
1959         subParameters->numOwnSvcBuffers = *max_buffers;
1960         subParameters->numSvcBufsInHal  = 0;
1961         subParameters->needBufferInit    = false;
1962         subParameters->minUndequedBuffer = 2;
1963 
1964         res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1965         if (res != NO_ERROR) {
1966             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1967             return 1;
1968         }
1969         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1970         ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1971         return 0;
1972     }
1973     else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1974         *stream_id = STREAM_ID_PRVCB;
1975 
1976         subParameters = &m_subStreams[STREAM_ID_PRVCB];
1977         memset(subParameters, 0, sizeof(substream_parameters_t));
1978 
1979         parentStream = (StreamThread*)(m_streamThreads[0].get());
1980         if (!parentStream) {
1981             return 1;
1982         }
1983 
1984         *format_actual = format;
1985         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1986         if (m_wideAspect)
1987             *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1988         *max_buffers = 7;
1989 
1990         subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1991         subParameters->width        = width;
1992         subParameters->height       = height;
1993         subParameters->format       = *format_actual;
1994         subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1995         subParameters->streamOps     = stream_ops;
1996         subParameters->usage         = *usage;
1997         subParameters->numOwnSvcBuffers = *max_buffers;
1998         subParameters->numSvcBufsInHal  = 0;
1999         subParameters->needBufferInit    = false;
2000         subParameters->minUndequedBuffer = 2;
2001 
2002         if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
2003             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
2004             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
2005         }
2006         else {
2007             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
2008             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
2009         }
2010 
2011         res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
2012         if (res != NO_ERROR) {
2013             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
2014             return 1;
2015         }
2016         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
2017         ALOGV("(%s): Enabling previewcb", __FUNCTION__);
2018         return 0;
2019     }
2020     ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
2021     return 1;
2022 }
2023 
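/*
 * Registers the buffers the service allocated for a stream. Substream
 * buffers (JPEG / RECORD / PRVCB) are locked through gralloc to obtain
 * their virtual addresses and are marked ON_SERVICE. For direct streams
 * (PREVIEW / ZSL) the V4L2 node is configured, the dma-buf fds are
 * ion-mapped when needsIonMap is set, the first numHwBuffers buffers are
 * queued to the driver (with an extra metadata plane under
 * ENABLE_FRAME_SYNC) and streaming is started.
 */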
2024 int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
2025         int num_buffers, buffer_handle_t *registeringBuffers)
2026 {
2027     int                     i,j;
2028     void                    *virtAddr[3];
2029     int                     plane_index = 0;
2030     StreamThread *          targetStream;
2031     stream_parameters_t     *targetStreamParms;
2032     node_info_t             *currentNode;
2033 
2034     struct v4l2_buffer v4l2_buf;
2035     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
2036 
2037     ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
2038         stream_id, num_buffers, (uint32_t)registeringBuffers);
2039 
2040     if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
2041         targetStream = m_streamThreads[0].get();
2042         targetStreamParms = &(m_streamThreads[0]->m_parameters);
2043 
2044     }
2045     else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
2046         substream_parameters_t  *targetParms;
2047         targetParms = &m_subStreams[stream_id];
2048 
2049         targetParms->numSvcBuffers = num_buffers;
2050 
2051         for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
2052             ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
2053                 stream_id, i, (uint32_t)(registeringBuffers[i]));
2054             if (m_grallocHal) {
2055                 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
2056                        targetParms->usage, 0, 0,
2057                        targetParms->width, targetParms->height, virtAddr) != 0) {
2058                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2059                 }
2060                 else {
2061                     ExynosBuffer currentBuf;
2062                     const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2063                     if (targetParms->svcPlanes == 1) {
2064                         currentBuf.fd.extFd[0] = priv_handle->fd;
2065                         currentBuf.size.extS[0] = priv_handle->size;
2066                         currentBuf.size.extS[1] = 0;
2067                         currentBuf.size.extS[2] = 0;
2068                     } else if (targetParms->svcPlanes == 2) {
2069                         currentBuf.fd.extFd[0] = priv_handle->fd;
2070                         currentBuf.fd.extFd[1] = priv_handle->fd1;
2071 
2072                     } else if (targetParms->svcPlanes == 3) {
2073                         currentBuf.fd.extFd[0] = priv_handle->fd;
2074                         currentBuf.fd.extFd[1] = priv_handle->fd1;
2075                         currentBuf.fd.extFd[2] = priv_handle->fd2;
2076                     }
2077                     for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
2078                         currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
2079                         CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2080                              __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
2081                              (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
2082                     }
2083                     targetParms->svcBufStatus[i]  = ON_SERVICE;
2084                     targetParms->svcBuffers[i]    = currentBuf;
2085                     targetParms->svcBufHandle[i]  = registeringBuffers[i];
2086                 }
2087             }
2088         }
2089         targetParms->needBufferInit = true;
2090         return 0;
2091     }
2092     else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2093         targetStream = m_streamThreads[1].get();
2094         targetStreamParms = &(m_streamThreads[1]->m_parameters);
2095     }
2096     else {
2097         ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2098         return 1;
2099     }
2100 
2101     if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2102         if (num_buffers < targetStreamParms->numHwBuffers) {
2103             ALOGE("ERR(%s): registering an insufficient number of buffers (%d) < (%d)",
2104                 __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2105             return 1;
2106         }
2107     }
2108     CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
2109             __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2110             targetStreamParms->height, targetStreamParms->planes);
2111     targetStreamParms->numSvcBuffers = num_buffers;
2112     currentNode = targetStreamParms->node;
2113     currentNode->width      = targetStreamParms->width;
2114     currentNode->height     = targetStreamParms->height;
2115     currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2116     currentNode->planes     = targetStreamParms->planes;
2117     currentNode->buffers    = targetStreamParms->numHwBuffers;
2118     cam_int_s_input(currentNode, m_camera_info.sensor_id);
2119     cam_int_s_fmt(currentNode);
2120     cam_int_reqbufs(currentNode);
2121     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2122         ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2123             i, (uint32_t)(registeringBuffers[i]));
2124                 v4l2_buf.m.planes   = planes;
2125                 v4l2_buf.type       = currentNode->type;
2126                 v4l2_buf.memory     = currentNode->memory;
2127                 v4l2_buf.index      = i;
2128                 v4l2_buf.length     = currentNode->planes;
2129 
2130                 ExynosBuffer currentBuf;
2131                 ExynosBuffer metaBuf;
2132                 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2133 
2134                 m_getAlignedYUVSize(currentNode->format,
2135                     currentNode->width, currentNode->height, &currentBuf);
2136 
2137                 ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2138                 if (currentNode->planes == 1) {
2139                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2140                     currentBuf.fd.extFd[0] = priv_handle->fd;
2141                     currentBuf.size.extS[0] = priv_handle->size;
2142                     currentBuf.size.extS[1] = 0;
2143                     currentBuf.size.extS[2] = 0;
2144                 } else if (currentNode->planes == 2) {
2145                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2146                     v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2147                     currentBuf.fd.extFd[0] = priv_handle->fd;
2148                     currentBuf.fd.extFd[1] = priv_handle->fd1;
2149 
2150                 } else if (currentNode->planes == 3) {
2151                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2152                     v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2153                     v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2154                     currentBuf.fd.extFd[0] = priv_handle->fd;
2155                     currentBuf.fd.extFd[2] = priv_handle->fd1;
2156                     currentBuf.fd.extFd[1] = priv_handle->fd2;
2157                 }
2158 
2159                 for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2160                     if (targetStreamParms->needsIonMap)
2161                         currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2162                     v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
2163                     ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2164                          __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2165                          (unsigned int)currentBuf.virt.extP[plane_index],
2166                          v4l2_buf.m.planes[plane_index].length);
2167                 }
2168 
2169                 if (i < currentNode->buffers) {
2170 
2171 
2172 #ifdef ENABLE_FRAME_SYNC
2173                     /* add plane for metadata*/
2174                     metaBuf.size.extS[0] = 4*1024;
2175                     allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
2176 
2177                     v4l2_buf.length += targetStreamParms->metaPlanes;
2178                     v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2179                     v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2180 
2181                     ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2182 #endif
2183                     if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2184                         ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2185                             __FUNCTION__, stream_id, currentNode->fd);
2186                     }
2187                     ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2188                             __FUNCTION__, stream_id, currentNode->fd);
2189                     targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
2190                 }
2191                 else {
2192                     targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
2193                 }
2194 
2195                 targetStreamParms->svcBuffers[i]       = currentBuf;
2196                 targetStreamParms->metaBuffers[i] = metaBuf;
2197                 targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
2198             }
2199 
2200     ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
2201     cam_int_streamon(targetStreamParms->node);
2202     ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
2203     currentNode->status = true;
2204     ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2205 
2206     return 0;
2207 }
2208 
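/*
 * Releases a stream or substream. Substreams are detached from their
 * parent stream thread; direct streams additionally unmap any ion-mapped
 * service buffers. Once a stream thread has no registered consumers it is
 * released, and releasing the preview stream also stops the sensor thread
 * and marks the ISP as not started.
 */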
2209 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2210 {
2211     StreamThread *targetStream;
2212     status_t res = NO_ERROR;
2213     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2214     bool releasingScpMain = false;
2215 
2216     if (stream_id == STREAM_ID_PREVIEW) {
2217         targetStream = (StreamThread*)(m_streamThreads[0].get());
2218         if (!targetStream) {
2219             ALOGW("(%s): Stream does not exist", __FUNCTION__);
2220             return NO_ERROR;
2221         }
2222         targetStream->m_numRegisteredStream--;
2223         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2224         releasingScpMain = true;
2225         if (targetStream->m_parameters.needsIonMap) {
2226             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2227                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2228                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2229                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2230                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2231                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2232                 }
2233             }
2234         }
2235     } else if (stream_id == STREAM_ID_JPEG) {
2236         if (m_resizeBuf.size.s != 0) {
2237             freeCameraMemory(&m_resizeBuf, 1);
2238         }
2239         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2240 
2241         targetStream = (StreamThread*)(m_streamThreads[1].get());
2242         if (!targetStream) {
2243             ALOGW("(%s): Stream does not exist", __FUNCTION__);
2244             return NO_ERROR;
2245         }
2246 
2247         if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2248             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2249             return 1;
2250         }
2251         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2252         return 0;
2253     } else if (stream_id == STREAM_ID_RECORD) {
2254         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2255 
2256         targetStream = (StreamThread*)(m_streamThreads[0].get());
2257         if (!targetStream) {
2258             ALOGW("(%s): Stream does not exist", __FUNCTION__);
2259             return NO_ERROR;
2260         }
2261 
2262         if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2263             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2264             return 1;
2265         }
2266 
2267         if (targetStream->m_numRegisteredStream != 0)
2268             return 0;
2269     } else if (stream_id == STREAM_ID_PRVCB) {
2270         if (m_previewCbBuf.size.s != 0) {
2271             freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2272         }
2273         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2274 
2275         targetStream = (StreamThread*)(m_streamThreads[0].get());
2276         if (!targetStream) {
2277             ALOGW("(%s): Stream does not exist", __FUNCTION__);
2278             return NO_ERROR;
2279         }
2280 
2281         if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2282             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2283             return 1;
2284         }
2285 
2286         if (targetStream->m_numRegisteredStream != 0)
2287             return 0;
2288     } else if (stream_id == STREAM_ID_ZSL) {
2289         targetStream = (StreamThread*)(m_streamThreads[1].get());
2290         if (!targetStream) {
2291             ALOGW("(%s): Stream does not exist", __FUNCTION__);
2292             return NO_ERROR;
2293         }
2294 
2295         targetStream->m_numRegisteredStream--;
2296         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2297         if (targetStream->m_parameters.needsIonMap) {
2298             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2299                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2300                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2301                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2302                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2303                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2304                 }
2305             }
2306         }
2307     } else {
2308         ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2309         return 1;
2310     }
2311 
2312     if (m_sensorThread != NULL && releasingScpMain) {
2313         m_sensorThread->release();
2314         ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2315         while (!m_sensorThread->IsTerminated())
2316             usleep(SIG_WAITING_TICK);
2317         ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
2318     }
2319 
2320     if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2321         ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2322         targetStream = (StreamThread*)(m_streamThreads[1].get());
2323         targetStream->m_releasing = true;
2324         ALOGD("START stream thread release %d", __LINE__);
2325         do {
2326             targetStream->release();
2327             usleep(SIG_WAITING_TICK);
2328         } while (targetStream->m_releasing);
2329         m_camera_info.capture.status = false;
2330         ALOGD("END   stream thread release %d", __LINE__);
2331     }
2332 
2333     if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2334         ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2335         targetStream = (StreamThread*)(m_streamThreads[0].get());
2336         targetStream->m_releasing = true;
2337         ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2338         do {
2339             targetStream->release();
2340             usleep(SIG_WAITING_TICK);
2341         } while (targetStream->m_releasing);
2342         ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2343         targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2344 
2345         if (targetStream != NULL) {
2346             ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2347             while (!targetStream->IsTerminated())
2348                 usleep(SIG_WAITING_TICK);
2349             ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
2350             m_streamThreads[0] = NULL;
2351         }
2352         if (m_camera_info.capture.status == true) {
2353             m_scpForceSuspended = true;
2354         }
2355         m_isIspStarted = false;
2356     }
2357     ALOGV("(%s): END", __FUNCTION__);
2358     return 0;
2359 }
2360 
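/*
 * Reprocess streams: allocateReprocessStream() is effectively a no-op in
 * this HAL. Reprocessing is set up through
 * allocateReprocessStreamFromStream(), which records the source output
 * stream and reports STREAM_ID_JPEG_REPROCESS.
 */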
2361 int ExynosCameraHWInterface2::allocateReprocessStream(
2362     uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/,
2363     const camera2_stream_in_ops_t* /*reprocess_stream_ops*/,
2364     uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/)
2365 {
2366     ALOGV("DEBUG(%s):", __FUNCTION__);
2367     return 0;
2368 }
2369 
2370 int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2371             uint32_t output_stream_id,
2372             const camera2_stream_in_ops_t *reprocess_stream_ops,
2373             // outputs
2374             uint32_t *stream_id)
2375 {
2376     ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2377     *stream_id = STREAM_ID_JPEG_REPROCESS;
2378 
2379     m_reprocessStreamId = *stream_id;
2380     m_reprocessOps = reprocess_stream_ops;
2381     m_reprocessOutputStreamId = output_stream_id;
2382     return 0;
2383 }
2384 
2385 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2386 {
2387     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2388     if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2389         m_reprocessStreamId = 0;
2390         m_reprocessOps = NULL;
2391         m_reprocessOutputStreamId = 0;
2392         return 0;
2393     }
2394     return 1;
2395 }
2396 
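/*
 * Dispatches asynchronous 3A triggers from the framework (autofocus start,
 * autofocus cancel, precapture metering) to the corresponding handlers,
 * serialized by m_afModeTriggerLock.
 */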
2397 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2398 {
2399     Mutex::Autolock lock(m_afModeTriggerLock);
2400     ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2401 
2402     switch (trigger_id) {
2403     case CAMERA2_TRIGGER_AUTOFOCUS:
2404         ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2405         OnAfTrigger(ext1);
2406         break;
2407 
2408     case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2409         ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2410         OnAfCancel(ext1);
2411         break;
2412     case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2413         ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2414         OnPrecaptureMeteringTriggerStart(ext1);
2415         break;
2416     default:
2417         break;
2418     }
2419     return 0;
2420 }
2421 
2422 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2423 {
2424     ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2425     m_notifyCb = notify_cb;
2426     m_callbackCookie = user;
2427     return 0;
2428 }
2429 
2430 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2431 {
2432     ALOGV("DEBUG(%s):", __FUNCTION__);
2433     *ops = NULL;
2434     return 0;
2435 }
2436 
2437 int ExynosCameraHWInterface2::dump(int /*fd*/)
2438 {
2439     ALOGV("DEBUG(%s):", __FUNCTION__);
2440     return 0;
2441 }
2442 
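/*
 * Fills buf->size.extS[] with the per-plane allocation sizes for the given
 * V4L2 color format, applying the alignment each format requires.
 * For example, V4L2_PIX_FMT_NV12M at 1920x1080 gives
 *   extS[0] = ALIGN(1920,16) * ALIGN(1080,16)  = 1920 * 1088 = 2088960
 *   extS[1] = ALIGN(2088960 / 2, 256)          = 1044480
 *   extS[2] = 0
 */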
2443 void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2444 {
2445     switch (colorFormat) {
2446     // 1p
2447     case V4L2_PIX_FMT_RGB565 :
2448     case V4L2_PIX_FMT_YUYV :
2449     case V4L2_PIX_FMT_UYVY :
2450     case V4L2_PIX_FMT_VYUY :
2451     case V4L2_PIX_FMT_YVYU :
2452         buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2453         buf->size.extS[1] = 0;
2454         buf->size.extS[2] = 0;
2455         break;
2456     // 2p
2457     case V4L2_PIX_FMT_NV12 :
2458     case V4L2_PIX_FMT_NV12T :
2459     case V4L2_PIX_FMT_NV21 :
2460         buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2461         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2462         buf->size.extS[2] = 0;
2463         break;
2464     case V4L2_PIX_FMT_NV12M :
2465     case V4L2_PIX_FMT_NV12MT_16X16 :
2466     case V4L2_PIX_FMT_NV21M:
2467         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2468         buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2469         buf->size.extS[2] = 0;
2470         break;
2471     case V4L2_PIX_FMT_NV16 :
2472     case V4L2_PIX_FMT_NV61 :
2473         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2474         buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2475         buf->size.extS[2] = 0;
2476         break;
2477      // 3p
2478     case V4L2_PIX_FMT_YUV420 :
2479     case V4L2_PIX_FMT_YVU420 :
2480         buf->size.extS[0] = (w * h);
2481         buf->size.extS[1] = (w * h) >> 2;
2482         buf->size.extS[2] = (w * h) >> 2;
2483         break;
2484     case V4L2_PIX_FMT_YUV420M:
2485     case V4L2_PIX_FMT_YVU420M :
2486         buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
2487         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2488         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2489         break;
2490     case V4L2_PIX_FMT_YUV422P :
2491         buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2492         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2493         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2494         break;
2495     default:
2496         ALOGE("ERR(%s): unsupported colorFormat(%d)", __FUNCTION__, colorFormat);
2497         return;
2499     }
2500 }
2501 
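/*
 * Computes a center crop of the source that matches the destination aspect
 * ratio, optionally reduced by the zoom level, with the crop width, height
 * and offsets forced to even values. For example, src 1920x1080 cropped for
 * a 640x480 target with zoom 0 yields a 1440x1080 crop at offset (240, 0).
 */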
2502 bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2503                                              int  dst_w,  int   dst_h,
2504                                              int *crop_x, int *crop_y,
2505                                              int *crop_w, int *crop_h,
2506                                              int zoom)
2507 {
2508     *crop_w = src_w;
2509     *crop_h = src_h;
2510 
2511     if (   src_w != dst_w
2512         || src_h != dst_h) {
2513         float src_ratio = 1.0f;
2514         float dst_ratio = 1.0f;
2515 
2516         // ex : 1024 / 768
2517         src_ratio = (float)src_w / (float)src_h;
2518 
2519         // ex : 352  / 288
2520         dst_ratio = (float)dst_w / (float)dst_h;
2521 
2522         // The same center-crop math applies whether the destination is
2523         // smaller or larger than the source, so compute it once.
2524         if (dst_ratio <= src_ratio) {
2525             // shrink w
2526             *crop_w = src_h * dst_ratio;
2527             *crop_h = src_h;
2528         } else {
2529             // shrink h
2530             *crop_w = src_w;
2531             *crop_h = src_w / dst_ratio;
2532         }
2543     }
2544 
2545     if (zoom != 0) {
2546         float zoomLevel = ((float)zoom + 10.0) / 10.0;
2547         *crop_w = (int)((float)*crop_w / zoomLevel);
2548         *crop_h = (int)((float)*crop_h / zoomLevel);
2549     }
2550 
2551     #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2552     unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2553     if (w_align != 0) {
2554         if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2555             && (int)(*crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align)) <= dst_w) {
2556             *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2557         }
2558         else
2559             *crop_w -= w_align;
2560     }
2561 
2562     #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2563     unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2564     if (h_align != 0) {
2565         if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2566             && (int)(*crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align)) <= dst_h) {
2567             *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2568         }
2569         else
2570             *crop_h -= h_align;
2571     }
2572 
2573     *crop_x = (src_w - *crop_w) >> 1;
2574     *crop_y = (src_h - *crop_h) >> 1;
2575 
2576     if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2577         *crop_x -= 1;
2578 
2579     if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2580         *crop_y -= 1;
2581 
2582     return true;
2583 }
2584 
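/*
 * BayerBufManager tracks the fixed pool of NUM_BAYER_BUFFERS Bayer buffers
 * as they cycle through the pipeline. Each entry moves through
 * BAYER_ON_HAL_EMPTY -> BAYER_ON_SENSOR -> BAYER_ON_HAL_FILLED ->
 * BAYER_ON_ISP (and presumably back to empty once the ISP result is
 * collected). sensorEnqueueHead/ispEnqueueHead act as ring indices and the
 * numOn* counters mirror how many buffers are in each state.
 */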
2585 BayerBufManager::BayerBufManager()
2586 {
2587     ALOGV("DEBUG(%s): ", __FUNCTION__);
2588     for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2589         entries[i].status = BAYER_ON_HAL_EMPTY;
2590         entries[i].reqFrameCnt = 0;
2591     }
2592     sensorEnqueueHead = 0;
2593     sensorDequeueHead = 0;
2594     ispEnqueueHead = 0;
2595     ispDequeueHead = 0;
2596     numOnSensor = 0;
2597     numOnIsp = 0;
2598     numOnHalFilled = 0;
2599     numOnHalEmpty = NUM_BAYER_BUFFERS;
2600 }
2601 
2602 BayerBufManager::~BayerBufManager()
2603 {
2604     ALOGV("%s", __FUNCTION__);
2605 }
2606 
2607 int     BayerBufManager::GetIndexForSensorEnqueue()
2608 {
2609     int ret = 0;
2610     if (numOnHalEmpty == 0)
2611         ret = -1;
2612     else
2613         ret = sensorEnqueueHead;
2614     ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2615     return ret;
2616 }
2617 
2618 int    BayerBufManager::MarkSensorEnqueue(int index)
2619 {
2620     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2621 
2622     // sanity check
2623     if (index != sensorEnqueueHead) {
2624         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2625         return -1;
2626     }
2627     if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2628         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2629             index, entries[index].status, BAYER_ON_HAL_EMPTY);
2630         return -1;
2631     }
2632 
2633     entries[index].status = BAYER_ON_SENSOR;
2634     entries[index].reqFrameCnt = 0;
2635     numOnHalEmpty--;
2636     numOnSensor++;
2637     sensorEnqueueHead = GetNextIndex(index);
2638     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2639         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2640     return 0;
2641 }
2642 
2643 int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t* /*timeStamp*/)
2644 {
2645     ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2646 
2647     if (entries[index].status != BAYER_ON_SENSOR) {
2648         ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2649             index, entries[index].status, BAYER_ON_SENSOR);
2650         return -1;
2651     }
2652 
2653     entries[index].status = BAYER_ON_HAL_FILLED;
2654     numOnHalFilled++;
2655     numOnSensor--;
2656 
2657     return 0;
2658 }
2659 
2660 int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2661 {
2662     int ret = 0;
2663     if (numOnHalFilled == 0)
2664         ret = -1;
2665     else {
2666         *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2667         ret = ispEnqueueHead;
2668     }
2669     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2670     return ret;
2671 }
2672 
2673 int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2674 {
2675     int ret = 0;
2676     if (numOnIsp == 0)
2677         ret = -1;
2678     else {
2679         *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2680         ret = ispDequeueHead;
2681     }
2682     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2683     return ret;
2684 }
2685 
2686 int    BayerBufManager::MarkIspEnqueue(int index)
2687 {
2688     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2689 
2690     // sanity check
2691     if (index != ispEnqueueHead) {
2692         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2693         return -1;
2694     }
2695     if (entries[index].status != BAYER_ON_HAL_FILLED) {
2696         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2697             index, entries[index].status, BAYER_ON_HAL_FILLED);
2698         return -1;
2699     }
2700 
2701     entries[index].status = BAYER_ON_ISP;
2702     numOnHalFilled--;
2703     numOnIsp++;
2704     ispEnqueueHead = GetNextIndex(index);
2705     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2706         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2707     return 0;
2708 }
2709 
2710 int    BayerBufManager::MarkIspDequeue(int index)
2711 {
2712     ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2713 
2714     // sanity check
2715     if (index != ispDequeueHead) {
2716         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2717         return -1;
2718     }
2719     if (entries[index].status != BAYER_ON_ISP) {
2720         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2721             index, entries[index].status, BAYER_ON_ISP);
2722         return -1;
2723     }
2724 
2725     entries[index].status = BAYER_ON_HAL_EMPTY;
2726     entries[index].reqFrameCnt = 0;
2727     numOnHalEmpty++;
2728     numOnIsp--;
2729     ispDequeueHead = GetNextIndex(index);
2730     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2731         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2732     return 0;
2733 }
2734 
2735 int BayerBufManager::GetNumOnSensor()
2736 {
2737     return numOnSensor;
2738 }
2739 
2740 int BayerBufManager::GetNumOnHalFilled()
2741 {
2742     return numOnHalFilled;
2743 }
2744 
2745 int BayerBufManager::GetNumOnIsp()
2746 {
2747     return numOnIsp;
2748 }
2749 
2750 int     BayerBufManager::GetNextIndex(int index)
2751 {
2752     index++;
2753     if (index >= NUM_BAYER_BUFFERS)
2754         index = 0;
2755 
2756     return index;
2757 }
2758 
2759 void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2760 {
2761     camera_metadata_t *currentRequest = NULL;
2762     camera_metadata_t *currentFrame = NULL;
2763     size_t numEntries = 0;
2764     size_t frameSize = 0;
2765     camera_metadata_t * preparedFrame = NULL;
2766     camera_metadata_t *deregisteredRequest = NULL;
2767     uint32_t currentSignal = self->GetProcessingSignal();
2768     MainThread *  selfThread      = ((MainThread*)self);
2769     int res = 0;
2770 
2771     int ret;
2772     int afMode;
2773     uint32_t afRegion[4];
2774 
2775     ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2776 
2777     if (currentSignal & SIGNAL_THREAD_RELEASE) {
2778         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2779 
2780         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2781         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2782         return;
2783     }
2784 
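    /*
     * SIGNAL_MAIN_REQ_Q_NOT_EMPTY (summary added for clarity): pull one request
     * from the service request queue, register it with the request manager
     * (which also yields the AF mode/region to apply), then wake the sensor
     * thread. The signal is re-posted while the internal queue still has room.
     */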
2785     if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2786         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2787         if (m_requestManager->IsRequestQueueFull()==false) {
2788             Mutex::Autolock lock(m_afModeTriggerLock);
2789             m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2790             if (NULL == currentRequest) {
2791                 ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
2792                 m_isRequestQueueNull = true;
2793                 if (m_requestManager->IsVdisEnable())
2794                     m_vdisBubbleCnt = 1;
2795             }
2796             else {
2797                 m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
2798 
2799                 SetAfMode((enum aa_afmode)afMode);
2800                 SetAfRegion(afRegion);
2801 
2802                 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2803                 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2804                 if (m_requestManager->IsRequestQueueFull()==false)
2805                     selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2806 
2807                 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2808             }
2809         }
2810         else {
2811             m_isRequestQueuePending = true;
2812         }
2813     }
2814 
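    /*
     * SIGNAL_MAIN_STREAM_OUTPUT_DONE (summary added for clarity): build the
     * result metadata for the finished request, retire and free the original
     * request, then dequeue an empty frame from the service, append the
     * prepared metadata and enqueue it back to the frame queue.
     */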
2815     if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2816         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2817         /*while (1)*/ {
2818             ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2819             if (ret == false)
2820                 CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2821 
2822             m_requestManager->DeregisterRequest(&deregisteredRequest);
2823 
2824             ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2825             if (ret < 0)
2826                 CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2827 
2828             ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2829             if (ret < 0)
2830                 CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2831 
2832             if (currentFrame==NULL) {
2833                 ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2834             }
2835             else {
2836                 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2837             }
2838             res = append_camera_metadata(currentFrame, preparedFrame);
2839             if (res==0) {
2840                 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2841                 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2842             }
2843             else {
2844                 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2845             }
2846         }
2847         if (!m_isRequestQueueNull) {
2848             selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2849         }
2850 
2851         if (getInProgressCount()>0) {
2852             ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2853             m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2854         }
2855     }
2856     ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2857     return;
2858 }
2859 
2860 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2861 {
2862     ALOGD("####  common Section");
2863     ALOGD("####                 magic(%x) ",
2864         shot_ext->shot.magicNumber);
2865     ALOGD("####  ctl Section");
2866     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2867         shot_ext->shot.ctl.request.metadataMode,
2868         shot_ext->shot.ctl.lens.aperture,
2869         shot_ext->shot.ctl.sensor.exposureTime,
2870         shot_ext->shot.ctl.sensor.frameDuration,
2871         shot_ext->shot.ctl.sensor.sensitivity,
2872         shot_ext->shot.ctl.aa.awbMode);
2873 
2874     ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2875         shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2876         shot_ext->shot.ctl.request.outputStreams[0]);
2877 
2878     ALOGD("####  DM Section");
2879     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2880         shot_ext->shot.dm.request.metadataMode,
2881         shot_ext->shot.dm.lens.aperture,
2882         shot_ext->shot.dm.sensor.exposureTime,
2883         shot_ext->shot.dm.sensor.frameDuration,
2884         shot_ext->shot.dm.sensor.sensitivity,
2885         shot_ext->shot.dm.sensor.timeStamp,
2886         shot_ext->shot.dm.aa.awbMode,
2887         shot_ext->shot.dm.request.frameCount );
2888 }
2889 
2890 void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2891 {
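    /*
     * Flash pre-capture / capture state machine (summary derived from the cases
     * below together with the sensor/ISP listeners):
     *   ON -> ON_WAIT -> ON_DONE -> AUTO_AE_AWB_LOCK -> AE_AWB_LOCK_WAIT
     *      -> AUTO_WAIT -> AUTO_DONE -> AUTO_OFF
     *   CAPTURE -> CAPTURE_WAIT -> CAPTURE_JPEG -> CAPTURE_END -> NONE
     * This setter programs the per-frame flash controls; the listeners advance
     * m_flashCnt based on the metadata returned by the sensor and the ISP.
     */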
2892     // Flash
2893     switch (m_ctlInfo.flash.m_flashCnt) {
2894     case IS_FLASH_STATE_ON:
2895         ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2896         // check AF locked
2897         if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2898             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2899                 if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2900                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2901                     m_ctlInfo.flash.m_flashTimeOut = 5;
2902                 } else
2903                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2904                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2905             } else {
2906                 m_ctlInfo.flash.m_flashTimeOut--;
2907             }
2908         } else {
2909             if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2910                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2911                 m_ctlInfo.flash.m_flashTimeOut = 5;
2912             } else
2913                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2914             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2915         }
2916         break;
2917     case IS_FLASH_STATE_ON_WAIT:
2918         break;
2919     case IS_FLASH_STATE_ON_DONE:
2920         if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2921             // auto transition at pre-capture trigger
2922             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2923         break;
2924     case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2925         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2926         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2927         //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2928         shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2929         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2930         break;
2931     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2932     case IS_FLASH_STATE_AUTO_WAIT:
2933         shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2934         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2935         break;
2936     case IS_FLASH_STATE_AUTO_DONE:
2937         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2938         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2939         break;
2940     case IS_FLASH_STATE_AUTO_OFF:
2941         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2942         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2943         m_ctlInfo.flash.m_flashEnableFlg = false;
2944         break;
2945     case IS_FLASH_STATE_CAPTURE:
2946         ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2947         m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2948         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2949         shot_ext->request_scc = 0;
2950         shot_ext->request_scp = 0;
2951         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2952         break;
2953     case IS_FLASH_STATE_CAPTURE_WAIT:
2954         shot_ext->request_scc = 0;
2955         shot_ext->request_scp = 0;
2956         break;
2957     case IS_FLASH_STATE_CAPTURE_JPEG:
2958         ALOGV("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2959         shot_ext->request_scc = 1;
2960         shot_ext->request_scp = 1;
2961         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2962         break;
2963     case IS_FLASH_STATE_CAPTURE_END:
2964         ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2965         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2966         shot_ext->request_scc = 0;
2967         shot_ext->request_scp = 0;
2968         m_ctlInfo.flash.m_flashEnableFlg = false;
2969         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
2970         m_ctlInfo.flash.m_afFlashDoneFlg= false;
2971         break;
2972     case IS_FLASH_STATE_NONE:
2973         break;
2974     default:
2975         ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2976     }
2977 }
2978 
2979 void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2980 {
2981     // Flash
2982     switch (m_ctlInfo.flash.m_flashCnt) {
2983     case IS_FLASH_STATE_AUTO_WAIT:
2984         if (m_ctlInfo.flash.m_flashDecisionResult) {
2985             if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2986                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2987                 ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2988             } else {
2989                 ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2990             }
2991         } else {
2992             // If flash isn't activated in flash auto mode, skip the flash auto control
2993             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2994             ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2995         }
2996         break;
2997     }
2998 }
2999 
3000 void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
3001 {
3002     // Flash
3003     switch (m_ctlInfo.flash.m_flashCnt) {
3004     case IS_FLASH_STATE_ON_WAIT:
3005         if (shot_ext->shot.dm.flash.decision > 0) {
3006             // store the decision result so the capture sequence can be skipped
3007             ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
3008             if (shot_ext->shot.dm.flash.decision == 2)
3009                 m_ctlInfo.flash.m_flashDecisionResult = false;
3010             else
3011                 m_ctlInfo.flash.m_flashDecisionResult = true;
3012             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3013         } else {
3014             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3015                 ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
3016                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3017                 m_ctlInfo.flash.m_flashDecisionResult = false;
3018             } else {
3019                 m_ctlInfo.flash.m_flashTimeOut--;
3020             }
3021         }
3022         break;
3023     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
3024         if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
3025             ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
3026             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
3027         } else {
3028             ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
3029         }
3030         break;
3031     case IS_FLASH_STATE_CAPTURE_WAIT:
3032         if (m_ctlInfo.flash.m_flashDecisionResult) {
3033             if (shot_ext->shot.dm.flash.firingStable) {
3034                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3035             } else {
3036                 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3037                     ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
3038                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3039                 } else {
3040                     ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
3041                     m_ctlInfo.flash.m_flashTimeOut--;
3042                 }
3043             }
3044         } else {
3045             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3046         }
3047         break;
3048     }
3049 }
3050 
3051 void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
3052 {
3053     switch (m_ctlInfo.flash.i_flashMode) {
3054     case AA_AEMODE_ON:
3055         // At flash off mode, capture can be done as zsl capture
3056         shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
3057         break;
3058     case AA_AEMODE_ON_AUTO_FLASH:
3059         // In flash auto mode, the main flash has to be fired if the pre-flash was done.
3060         if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
3061             shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
3062         // FALLTHRU
3063     default:
3064         break;
3065     }
3066 }
3067 
3068 void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
3069 {
3070     shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
3071     shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
3072     shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
3073     shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
3074 }
3075 
3076 void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
3077 {
3078     currentAfRegion[0] = afRegion[0];
3079     currentAfRegion[1] = afRegion[1];
3080     currentAfRegion[2] = afRegion[2];
3081     currentAfRegion[3] = afRegion[3];
3082 }
3083 
3084 void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
3085 {
3086     if (m_afState == HAL_AFSTATE_SCANNING) {
3087         ALOGD("(%s): restarting trigger ", __FUNCTION__);
3088     } else if (!mode) {
3089         if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
3090             ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
3091         else
3092             m_afState = HAL_AFSTATE_STARTED;
3093     }
3094     ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
3095     shot_ext->shot.ctl.aa.afTrigger = 1;
3096     shot_ext->shot.ctl.aa.afMode = m_afMode;
3097     m_IsAfTriggerRequired = false;
3098 }
3099 
3100 void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3101 {
3102     uint32_t        currentSignal = self->GetProcessingSignal();
3103     SensorThread *  selfThread      = ((SensorThread*)self);
3104     int index;
3105     int index_isp;
3106     status_t res;
3107     nsecs_t frameTime;
3108     int bayersOnSensor = 0, bayersOnIsp = 0;
3109     int j = 0;
3110     bool isCapture = false;
3111     ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3112 
3113     if (currentSignal & SIGNAL_THREAD_RELEASE) {
3114         CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3115 
3116         ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3117         cam_int_streamoff(&(m_camera_info.sensor));
3118         ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3119 
3120         m_camera_info.sensor.buffers = 0;
3121         ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3122         cam_int_reqbufs(&(m_camera_info.sensor));
3123         ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3124         m_camera_info.sensor.status = false;
3125 
3126         ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3127         isp_int_streamoff(&(m_camera_info.isp));
3128         ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3129 
3130         m_camera_info.isp.buffers = 0;
3131         ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3132         cam_int_reqbufs(&(m_camera_info.isp));
3133         ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3134 
3135         exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3136 
3137         m_requestManager->releaseSensorQ();
3138         m_requestManager->ResetEntry();
3139         ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3140         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3141         return;
3142     }
3143 
3144     if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3145     {
3146         ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3147         int targetStreamIndex = 0, i=0;
3148         int matchedFrameCnt = -1, processingReqIndex;
3149         struct camera2_shot_ext *shot_ext;
3150         struct camera2_shot_ext *shot_ext_capture;
3151         bool triggered = false;
3152 
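        /*
         * Per-frame pipeline handled below (summary added for clarity):
         *   1. DQBUF a bayer buffer and its embedded shot_ext from the sensor
         *   2. match it to a pending request (or build a "bubble" shot)
         *   3. patch the shot controls (crop/zoom, AF, flash, night mode, FPS)
         *   4. QBUF to the ISP and DQBUF the processed metadata back
         *   5. signal the stream threads for SCP/SCC output, then QBUF the
         *      bayer buffer back to the sensor node
         */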
3153         /* dqbuf from sensor */
3154         ALOGV("Sensor DQbuf start");
3155         index = cam_int_dqbuf(&(m_camera_info.sensor));
3156         m_requestManager->pushSensorQ(index);
3157         ALOGV("Sensor DQbuf done(%d)", index);
3158         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3159 
3160         if (m_nightCaptureCnt != 0) {
3161             matchedFrameCnt = m_nightCaptureFrameCnt;
3162         } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3163             matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3164             ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3165         } else {
3166             matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext, m_isRequestQueueNull);
3167         }
3168 
3169         if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3170             matchedFrameCnt = m_vdisDupFrame;
3171         }
3172 
3173         if (matchedFrameCnt != -1) {
3174             if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
3175                 frameTime = systemTime();
3176                 m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3177                 m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3178             } else {
3179                 ALOGV("bubble for vdis: m_vdisDupFrame %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
3180             }
3181 
3182             // track scene mode changes (face AF handling below depends on the face priority scene mode)
3183             if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3184                 ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
3185                 m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3186             }
3187 
3188             m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3189             float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3190             int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3191 
3192             m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3193                            m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3194                            &crop_x, &crop_y,
3195                            &crop_w, &crop_h,
3196                            0);
3197 
3198             if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3199                 zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
3200                 zoomHeight = zoomWidth *
3201                         m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3202             } else {
3203                 zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3204                 zoomWidth = zoomHeight *
3205                         m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3206             }
3207             zoomLeft = (crop_w - zoomWidth) / 2;
3208             zoomTop = (crop_h - zoomHeight) / 2;
3209 
3210             int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
3211 
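            // Note (added for clarity): cropCompensation adjusts the zoom width so
            // that cropRegion[0] * 2 + cropRegion[2] matches the 4-pixel-aligned
            // crop width, keeping the zoom window centered inside the crop.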
3212             int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
3213             if (cropCompensation)
3214                 new_cropRegion[2] -= cropCompensation;
3215 
3216             shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3217             shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3218             shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
3219             if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3220                 ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
3221                 shot_ext->shot.ctl.aa.afMode = m_afMode;
3222                 if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3223                     ALOGD("### With automatic trigger for continuous modes");
3224                     m_afState = HAL_AFSTATE_STARTED;
3225                     shot_ext->shot.ctl.aa.afTrigger = 1;
3226                     triggered = true;
3227                     if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
3228                             (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
3229                         switch (m_afMode) {
3230                         case AA_AFMODE_CONTINUOUS_PICTURE:
3231                             shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3232                             ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
3233                             // FALLTHRU
3234                         default:
3235                             break;
3236                         }
3237                     }
3238                     // reset flash result
3239                     if (m_ctlInfo.flash.m_afFlashDoneFlg) {
3240                         m_ctlInfo.flash.m_flashEnableFlg = false;
3241                         m_ctlInfo.flash.m_afFlashDoneFlg = false;
3242                         m_ctlInfo.flash.m_flashDecisionResult = false;
3243                         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
3244                     }
3245                     m_ctlInfo.af.m_afTriggerTimeOut = 1;
3246                 }
3247 
3248                 m_IsAfModeUpdateRequired = false;
3249                 // support infinity focus mode
3250                 if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3251                     shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3252                     shot_ext->shot.ctl.aa.afTrigger = 1;
3253                     triggered = true;
3254                 }
3255                 if (m_afMode2 != NO_CHANGE) {
3256                     enum aa_afmode tempAfMode = m_afMode2;
3257                     m_afMode2 = NO_CHANGE;
3258                     SetAfMode(tempAfMode);
3259                 }
3260             }
3261             else {
3262                 shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3263             }
3264             if (m_IsAfTriggerRequired) {
3265                 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3266                     // flash case
3267                     if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3268                         if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3269                             // Flash is enabled and start AF
3270                             m_afTrigger(shot_ext, 1);
3271                         } else {
3272                             m_afTrigger(shot_ext, 0);
3273                         }
3274                     }
3275                 } else {
3276                     // non-flash case
3277                     m_afTrigger(shot_ext, 0);
3278                 }
3279             } else {
3280                 shot_ext->shot.ctl.aa.afTrigger = 0;
3281             }
3282 
3283             if (m_wideAspect) {
3284                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3285             } else {
3286                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3287             }
3288             if (triggered)
3289                 shot_ext->shot.ctl.aa.afTrigger = 1;
3290 
3291             // TODO : check collision with AFMode Update
3292             if (m_IsAfLockRequired) {
3293                 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3294                 m_IsAfLockRequired = false;
3295             }
3296             ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3297                 index,
3298                 shot_ext->shot.ctl.request.frameCount,
3299                 shot_ext->request_scp,
3300                 shot_ext->request_scc,
3301                 shot_ext->dis_bypass, sizeof(camera2_shot));
3302 
3303             // update AF region
3304             m_updateAfRegion(shot_ext);
3305 
3306             m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3307             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3308                     && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3309                 shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
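            // Night-capture sequence (summary added for clarity): a still capture
            // in NIGHT scene mode arms m_nightCaptureCnt = 4; the counter then
            // runs down over the following frames with a slow AE FPS range, and
            // the SCC (capture) output is only requested on the final frame.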
3310             if (m_nightCaptureCnt == 0) {
3311                 if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3312                         && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3313                     shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3314                     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3315                     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3316                     m_nightCaptureCnt = 4;
3317                     m_nightCaptureFrameCnt = matchedFrameCnt;
3318                     shot_ext->request_scc = 0;
3319                 }
3320             }
3321             else if (m_nightCaptureCnt == 1) {
3322                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3323                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3324                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3325                 m_nightCaptureCnt--;
3326                 m_nightCaptureFrameCnt = 0;
3327                 shot_ext->request_scc = 1;
3328             }
3329             else if (m_nightCaptureCnt == 2) {
3330                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3331                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3332                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3333                 m_nightCaptureCnt--;
3334                 shot_ext->request_scc = 0;
3335             }
3336             else if (m_nightCaptureCnt == 3) {
3337                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3338                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3339                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3340                 m_nightCaptureCnt--;
3341                 shot_ext->request_scc = 0;
3342             }
3343             else if (m_nightCaptureCnt == 4) {
3344                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3345                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3346                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3347                 m_nightCaptureCnt--;
3348                 shot_ext->request_scc = 0;
3349             }
3350 
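            // frameDuration is in nanoseconds, roughly 1e9 / max FPS
            // (e.g. 30 fps -> 33.333 ms, 15 fps -> 66.666 ms).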
3351             switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
3352             case 15:
3353                 shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
3354                 break;
3355 
3356             case 24:
3357                 shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
3358                 break;
3359 
3360             case 25:
3361                 shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
3362                 break;
3363 
3364             case 30:
3365             default:
3366                 shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
3367                 break;
3368             }
3369             shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3370 
3371             // Flash mode
3372             // While flash is enabled for a still capture, hold back request_scc = 1 so the flash sequence can run first
3373             if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3374                     && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3375                     && (m_cameraId == 0)) {
3376                 if (!m_ctlInfo.flash.m_flashDecisionResult) {
3377                     m_ctlInfo.flash.m_flashEnableFlg = false;
3378                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
3379                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
3380                 } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
3381                                           (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3382                     ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3383                     shot_ext->request_scc = 0;
3384                     m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3385                     m_ctlInfo.flash.m_flashEnableFlg = true;
3386                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
3387                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3388                 } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3389                     ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
3390                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3391                     m_ctlInfo.flash.m_flashEnableFlg = false;
3392                     m_ctlInfo.flash.m_afFlashDoneFlg= false;
3393                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
3394                 }
3395             } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3396                 m_ctlInfo.flash.m_flashDecisionResult = false;
3397             }
3398 
3399             if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
3400                 if (m_ctlInfo.flash.m_flashTorchMode == false) {
3401                     m_ctlInfo.flash.m_flashTorchMode = true;
3402                 }
3403             } else {
3404                 if (m_ctlInfo.flash.m_flashTorchMode == true) {
3405                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3406                     shot_ext->shot.ctl.flash.firingPower = 0;
3407                     m_ctlInfo.flash.m_flashTorchMode = false;
3408                 } else {
3409                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3410                 }
3411             }
3412 
3413             if (shot_ext->isReprocessing) {
3414                 ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
3415                 m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3416                 shot_ext->request_scp = 0;
3417                 shot_ext->request_scc = 0;
3418                 m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3419                 m_ctlInfo.flash.m_flashDecisionResult = false;
3420                 void *shot = m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt);
3421                 if (!shot) { // entry already retired (request queue was null); reuse the current shot_ext
3422                     ALOGD("(%s): isReprocessing: "
3423                         "m_reprocessingFrameCnt missing, using shot_ext",
3424                         __FUNCTION__);
3425                     shot = shot_ext;
3426                 }
3427                 memcpy(&m_jpegMetadata, shot, sizeof(struct camera2_shot_ext));
3428                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3429                 m_ctlInfo.flash.m_flashEnableFlg = false;
3430             }
3431 
3432             if (m_ctlInfo.flash.m_flashEnableFlg) {
3433                 m_preCaptureListenerSensor(shot_ext);
3434                 m_preCaptureSetter(shot_ext);
3435             }
3436 
3437             ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3438             (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3439             (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3440             (int)(shot_ext->shot.ctl.aa.afTrigger));
3441 
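            // VDIS bubble handling (note added for clarity): a duplicated frame
            // is pushed through the ISP with DIS/DNR bypassed and with no
            // SCP/SCC output requested, then the bubble counter is decremented.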
3442             if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3443                 shot_ext->dis_bypass = 1;
3444                 shot_ext->dnr_bypass = 1;
3445                 shot_ext->request_scp = 0;
3446                 shot_ext->request_scc = 0;
3447                 m_vdisBubbleCnt--;
3448                 matchedFrameCnt = -1;
3449             } else {
3450                 m_vdisDupFrame = matchedFrameCnt;
3451             }
3452             if (m_scpForceSuspended)
3453                 shot_ext->request_scc = 0;
3454 
3455             uint32_t current_scp = shot_ext->request_scp;
3456             uint32_t current_scc = shot_ext->request_scc;
3457 
3458             if (shot_ext->shot.dm.request.frameCount == 0) {
3459                 CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3460             }
3461 
3462             cam_int_qbuf(&(m_camera_info.isp), index);
3463 
3464             ALOGV("### isp DQBUF start");
3465             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3466 
3467             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3468 
3469             if (m_ctlInfo.flash.m_flashEnableFlg)
3470                 m_preCaptureListenerISP(shot_ext);
3471 
3472             ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3473                 index,
3474                 shot_ext->shot.ctl.request.frameCount,
3475                 shot_ext->request_scp,
3476                 shot_ext->request_scc,
3477                 shot_ext->dis_bypass,
3478                 shot_ext->dnr_bypass, sizeof(camera2_shot));
3479 
3480             ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3481                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3482                 (int)(shot_ext->shot.dm.aa.awbMode),
3483                 (int)(shot_ext->shot.dm.aa.afMode));
3484 
3485 #ifndef ENABLE_FRAME_SYNC
3486             m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3487 #endif
3488 
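            // Face-detection results (note added for clarity): the rectangles in
            // dm.stats.faceRectangles arrive in stream (preview) coordinates and
            // are rescaled below to sensor coordinates.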
3489             if (!shot_ext->fd_bypass) {
3490                 /* FD orientation axis transformation */
3491                 for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3492                     if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3493                         shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3494                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][0])
3495                                                                                                 / m_streamThreads[0].get()->m_parameters.width;
3496                     if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3497                         shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3498                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][1])
3499                                                                                                 / m_streamThreads[0].get()->m_parameters.height;
3500                     if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3501                         shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3502                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][2])
3503                                                                                                 / m_streamThreads[0].get()->m_parameters.width;
3504                     if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3505                         shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3506                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][3])
3507                                                                                                 / m_streamThreads[0].get()->m_parameters.height;
3508                 }
3509             }
3510             // aeState control
3511             if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
3512                 m_preCaptureAeState(shot_ext);
3513 
3514             // At scene mode face priority
3515             if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3516                 shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3517 
3518             if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3519                 m_requestManager->ApplyDynamicMetadata(shot_ext);
3520             }
3521 
3522             if (current_scc != shot_ext->request_scc) {
3523                 ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3524                                 __FUNCTION__, current_scc, shot_ext->request_scc);
3525                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3526             }
3527             if (shot_ext->request_scc) {
3528                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3529                 if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
3530                     void *shot = shot_ext;
3531                     if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE) {
3532                         shot = m_requestManager->GetInternalShotExtByFrameCnt(
3533                             shot_ext->shot.ctl.request.frameCount);
3534                         if (!shot) { // entry already retired (request queue was null); reuse the current shot_ext
3535                             ALOGD("(%s): request_scc: "
3536                                 "shot for frameCount missing, using shot_ext",
3537                                 __FUNCTION__);
3538                             shot = shot_ext;
3539                         }
3540                     }
3541                     memcpy(&m_jpegMetadata, shot, sizeof(struct camera2_shot_ext));
3542                 }
3543                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3544             }
3545             if (current_scp != shot_ext->request_scp) {
3546                 ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3547                                 __FUNCTION__, current_scp, shot_ext->request_scp);
3548                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3549             }
3550             if (shot_ext->request_scp) {
3551                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3552                 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3553             }
3554 
3555             ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3556                shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3557             if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3558                 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3559                 m_scp_closed = true;
3560             }
3561             else
3562                 m_scp_closed = false;
3563 
3564             OnAfNotification(shot_ext->shot.dm.aa.afState);
3565             OnPrecaptureMeteringNotificationISP();
3566         }   else {
3567             memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
3568             shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3569             shot_ext->request_sensor = 1;
3570             shot_ext->dis_bypass = 1;
3571             shot_ext->dnr_bypass = 1;
3572             shot_ext->fd_bypass = 1;
3573             shot_ext->drc_bypass = 1;
3574             shot_ext->request_scc = 0;
3575             shot_ext->request_scp = 0;
3576             if (m_wideAspect) {
3577                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3578             } else {
3579                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3580             }
3581             shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
3582             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3583                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3584                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3585             }
3586             shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3587             shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3588             ALOGV("### isp QBUF start (bubble)");
3589             ALOGV("bubble: queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3590                 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3591                 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3592                 (int)(shot_ext->shot.ctl.aa.afTrigger));
3593 
3594             cam_int_qbuf(&(m_camera_info.isp), index);
3595             ALOGV("### isp DQBUF start (bubble)");
3596             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3597             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3598             ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3599                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3600                 (int)(shot_ext->shot.dm.aa.awbMode),
3601                 (int)(shot_ext->shot.dm.aa.afMode));
3602 
3603             OnAfNotification(shot_ext->shot.dm.aa.afState);
3604         }
3605 
3606         index = m_requestManager->popSensorQ();
3607         if(index < 0){
3608             ALOGE("sensorQ is empty");
3609             return;
3610         }
3611 
3612         processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
3613         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3614         if (m_scp_closing || m_scp_closed) {
3615             ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3616             shot_ext->request_scc = 0;
3617             shot_ext->request_scp = 0;
3618             shot_ext->request_sensor = 0;
3619         }
3620         cam_int_qbuf(&(m_camera_info.sensor), index);
3621         ALOGV("Sensor Qbuf done(%d)", index);
3622 
3623         if (!m_scp_closing
3624             && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3625             ALOGV("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
3626                                     matchedFrameCnt, processingReqIndex);
3627             selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3628         }
3629     }
3630     return;
3631 }
3632 
3633 void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3634 {
3635     uint32_t                currentSignal   = self->GetProcessingSignal();
3636     StreamThread *          selfThread      = ((StreamThread*)self);
3637     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3638     node_info_t             *currentNode    = selfStreamParms->node;
3639     substream_parameters_t  *subParms;
3640     buffer_handle_t * buf = NULL;
3641     status_t res;
3642     void *virtAddr[3];
3643     int i, j;
3644     int index;
3645     nsecs_t timestamp;
3646 
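    /*
     * Buffer initialization (summary added for clarity): on the first pass the
     * main stream dequeues its gralloc buffers from the stream ops and marks
     * them ON_DRIVER / ON_HAL; each attached substream then does the same, and
     * the JPEG / preview-callback substreams additionally allocate their ION
     * scratch buffers (m_resizeBuf / m_previewCbBuf).
     */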
3647     if (!(selfThread->m_isBufferInit))
3648     {
3649         for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3650             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3651             if (res != NO_ERROR || buf == NULL) {
3652                 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3653                 return;
3654             }
3655             ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3656                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3657 
3658             index = selfThread->findBufferIndex(buf);
3659             if (index == -1) {
3660                 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3661             }
3662             else {
3663                 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3664                     __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3665                 if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3666                     selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3667                 else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3668                     selfStreamParms->svcBufStatus[index] = ON_HAL;
3669                 else {
3670                     ALOGV("DBG(%s): buffer status abnormal (%d) "
3671                         , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3672                 }
3673                 selfStreamParms->numSvcBufsInHal++;
3674             }
3675             selfStreamParms->bufIndex = 0;
3676         }
3677         selfThread->m_isBufferInit = true;
3678     }
3679     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3680         if (selfThread->m_attachedSubStreams[i].streamId == -1)
3681             continue;
3682 
3683         subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3684         if (subParms->type && subParms->needBufferInit) {
3685             ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3686                 __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3687             int checkingIndex = 0;
3688             bool found = false;
3689             for ( j = 0 ; j < subParms->numSvcBuffers; j++) { // use j so the outer substream index i is not clobbered
3690                 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3691                 if (res != NO_ERROR || buf == NULL) {
3692                     ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3693                     return;
3694                 }
3695                 subParms->numSvcBufsInHal++;
3696                 ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3697                    subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3698 
3699                 if (m_grallocHal->lock(m_grallocHal, *buf,
3700                        subParms->usage, 0, 0,
3701                        subParms->width, subParms->height, virtAddr) != 0) {
3702                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3703                 }
3704                 else {
3705                       ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3706                         __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3707                 }
3708                 found = false;
3709                 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3710                     if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3711                         found = true;
3712                         break;
3713                     }
3714                 }
3715                 ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3716                 if (!found) break;
3717 
3718                 index = checkingIndex;
3719 
3720                 if (index == -1) {
3721                     ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3722                 }
3723                 else {
3724                     ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3725                         __FUNCTION__, index, subParms->svcBufStatus[index]);
3726                     if (subParms->svcBufStatus[index]== ON_SERVICE)
3727                         subParms->svcBufStatus[index] = ON_HAL;
3728                     else {
3729                         ALOGV("DBG(%s): buffer status abnormal (%d) "
3730                             , __FUNCTION__, subParms->svcBufStatus[index]);
3731                     }
3732                     if (*buf != subParms->svcBufHandle[index])
3733                         ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3734                     else
3735                         ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3736                 }
3737                 subParms->svcBufIndex = 0;
3738             }
3739             if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3740                 m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3741                 m_resizeBuf.size.extS[1] = 0;
3742                 m_resizeBuf.size.extS[2] = 0;
3743 
3744                 if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3745                     ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3746                 }
3747             }
3748             if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3749                 m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3750                 subParms->height, &m_previewCbBuf);
3751 
3752                 if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3753                     ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3754                 }
3755             }
3756             subParms->needBufferInit= false;
3757         }
3758     }
3759 }
3760 
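/*
 * Per-thread bookkeeping reset: clears the stream parameters, marks the
 * buffers as uninitialized, and detaches every substream slot
 * (streamId == -1 means "unused"). Invoked from the SignalDrivenThread
 * initialization hook.
 */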
3761 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3762 {
3763     StreamThread *          selfThread      = ((StreamThread*)self);
3764     ALOGV("DEBUG(%s): ", __FUNCTION__ );
3765     memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3766     selfThread->m_isBufferInit = false;
3767     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3768         selfThread->m_attachedSubStreams[i].streamId    = -1;
3769         selfThread->m_attachedSubStreams[i].priority    = 0;
3770     }
3771     return;
3772 }
3773 
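/*
 * Substream dispatcher: routes one source frame from the parent stream to the
 * handler for the given substream id (JPEG still, video record, or preview
 * callback). Unknown ids are silently ignored.
 */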
3774 int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3775     int stream_id, nsecs_t frameTimeStamp)
3776 {
3777     substream_parameters_t  *subParms = &m_subStreams[stream_id];
3778 
3779     switch (stream_id) {
3780 
3781     case STREAM_ID_JPEG:
3782         return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3783 
3784     case STREAM_ID_RECORD:
3785         return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3786 
3787     case STREAM_ID_PRVCB:
3788         return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3789 
3790     default:
3791         return 0;
3792     }
3793 }
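/*
 * Direct-output stream handler (preview / ZSL). Signals handled here:
 *  - SIGNAL_THREAD_RELEASE: stream off the node and call REQBUFS(0) to release
 *    the driver buffers (plus the per-frame metadata buffers when frame sync
 *    is enabled).
 *  - SIGNAL_STREAM_REPROCESSING_START: acquire a buffer back from the
 *    reprocess stream, match it to a known service buffer, and feed it to the
 *    attached substreams.
 *  - SIGNAL_STREAM_DATA_COMING: DQBUF a filled buffer from the driver, run the
 *    attached substreams, hand the buffer to the service (enqueue or cancel),
 *    then dequeue fresh service buffers and QBUF them back to the driver.
 */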
3794 void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3795 {
3796     uint32_t                currentSignal   = self->GetProcessingSignal();
3797     StreamThread *          selfThread      = ((StreamThread*)self);
3798     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3799     node_info_t             *currentNode    = selfStreamParms->node;
3800     int i = 0;
3801     nsecs_t frameTimeStamp;
3802 
3803     if (currentSignal & SIGNAL_THREAD_RELEASE) {
3804         CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3805 
3806         if (selfThread->m_isBufferInit) {
3807             if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3808                 ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3809                     selfThread->m_index, currentNode->fd);
3810                 if (cam_int_streamoff(currentNode) < 0 ) {
3811                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3812                 }
3813                 ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3814                         selfThread->m_index, currentNode->fd);
3815                 currentNode->buffers = 0;
3816                 cam_int_reqbufs(currentNode);
3817                 ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3818                         selfThread->m_index, currentNode->fd);
3819             }
3820         }
3821 #ifdef ENABLE_FRAME_SYNC
3822         // free metabuffers
3823         for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3824             if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3825                 freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3826                 selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3827                 selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3828             }
3829 #endif
3830         selfThread->m_isBufferInit = false;
3831         selfThread->m_releasing = false;
3832         selfThread->m_activated = false;
3833         ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3834         return;
3835     }
3836     if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3837         status_t    res;
3838         buffer_handle_t * buf = NULL;
3839         bool found = false;
3840         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3841             __FUNCTION__, selfThread->m_index);
3842         res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3843         if (res != NO_ERROR || buf == NULL) {
3844             ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3845             return;
3846         }
3847         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3848         int checkingIndex = 0;
3849         for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3850             if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3851                 found = true;
3852                 break;
3853             }
3854         }
3855         ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3856             __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3857 
3858         if (!found) return;
3859 
3860         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3861             if (selfThread->m_attachedSubStreams[i].streamId == -1)
3862                 continue;
3863 
3864 #ifdef ENABLE_FRAME_SYNC
3865             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3866             m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3867 #else
3868             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3869 #endif
3870             if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3871                 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3872                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3873         }
3874 
3875         res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3876         if (res != NO_ERROR) {
3877             ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3878             return;
3879         }
3880         ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3881             __FUNCTION__,selfThread->m_index);
3882 
3883         return;
3884     }
3885     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3886         buffer_handle_t * buf = NULL;
3887         status_t res = 0;
3888         int i, j;
3889         int index;
3890         nsecs_t timestamp;
3891 #ifdef ENABLE_FRAME_SYNC
3892         camera2_stream *frame;
3893         uint8_t currentOutputStreams;
3894         bool directOutputEnabled = false;
3895 #endif
3896         int numOfUndqbuf = 0;
3897 
3898         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3899 
3900         m_streamBufferInit(self);
3901 
3902         do {
3903             ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3904                 selfThread->m_index, selfThread->streamType);
3905 
3906 #ifdef ENABLE_FRAME_SYNC
3907             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3908             frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3909             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3910             currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3911             ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3912             if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3913                  ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3914                 directOutputEnabled = true;
3915             }
3916             if (!directOutputEnabled) {
3917                 if (!m_nightCaptureFrameCnt)
3918                     m_requestManager->NotifyStreamOutput(frame->rcount);
3919             }
3920 #else
3921             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3922             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3923 #endif
3924             ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3925                 selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3926 
3927             if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3928                 ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3929                        __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3930             selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3931 
3932             for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3933                 if (selfThread->m_attachedSubStreams[i].streamId == -1)
3934                     continue;
3935 #ifdef ENABLE_FRAME_SYNC
3936                 if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3937                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3938                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3939                 }
3940 #else
3941                 if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3942                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3943                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3944                 }
3945 #endif
3946             }
3947 
3948             if (m_requestManager->GetSkipCnt() <= 0) {
3949 #ifdef ENABLE_FRAME_SYNC
3950                 if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3951                     ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3952                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3953                             frameTimeStamp,
3954                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3955                 }
3956                 else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3957                     ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3958                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3959                                 frameTimeStamp,
3960                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3961                 }
3962                 else {
3963                     res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3964                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3965                     ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3966                 }
3967 #else
3968                 if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3969                     ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3970                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3971                             frameTimeStamp,
3972                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3973                 }
3974                 else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3975                     ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex());
3976                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3977                                 frameTimeStamp,
3978                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3979                 }
3980 #endif
3981                 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3982             }
3983             else {
3984                 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3985                         &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3986                 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3987             }
3988 #ifdef ENABLE_FRAME_SYNC
3989             if (directOutputEnabled) {
3990                 if (!m_nightCaptureFrameCnt)
3991                      m_requestManager->NotifyStreamOutput(frame->rcount);
3992             }
3993 #endif
3994             if (res == 0) {
3995                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3996                 selfStreamParms->numSvcBufsInHal--;
3997             }
3998             else {
3999                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
4000             }
4001 
4002         } while (0);
4004 
4005         while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
4006                     < selfStreamParms->minUndequedBuffer) {
4007             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
4008             if (res != NO_ERROR || buf == NULL) {
4009                 ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
4010                 break;
4011             }
4012             selfStreamParms->numSvcBufsInHal++;
4013             ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
4014                 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
4015                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4016             const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4017 
4018             bool found = false;
4019             int checkingIndex = 0;
4020             for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
4021                 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4022                     found = true;
4023                     break;
4024                 }
4025             }
4026             if (!found) break;
4027             selfStreamParms->bufIndex = checkingIndex;
4028             if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
4029                 uint32_t    plane_index = 0;
4030                 ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
4031                 struct v4l2_buffer v4l2_buf;
4032                 struct v4l2_plane  planes[VIDEO_MAX_PLANES];
4033 
4034                 v4l2_buf.m.planes   = planes;
4035                 v4l2_buf.type       = currentNode->type;
4036                 v4l2_buf.memory     = currentNode->memory;
4037                 v4l2_buf.index      = selfStreamParms->bufIndex;
4038                 v4l2_buf.length     = currentNode->planes;
4039 
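                /* Note: the plane-1/plane-2 fds are deliberately left in this
                 * swapped order (fd1 -> plane 2, fd2 -> plane 1); it appears to
                 * match the chroma-plane order the driver expects for this
                 * format, so it is kept as-is. */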
4040                 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
4041                 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
4042                 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
4043                 for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
4044                     v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
4045                 }
4046 #ifdef ENABLE_FRAME_SYNC
4047                 /* add plane for metadata*/
4048                 v4l2_buf.length += selfStreamParms->metaPlanes;
4049                 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
4050                 v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
4051 #endif
4052                 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
4053                     ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
4054                         __FUNCTION__, selfThread->m_index);
4055                     return;
4056                 }
4057                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
4058                 ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
4059                     __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
4060             }
4061         }
4062 
4063         ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
4064     }
4065     return;
4066 }
4067 
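/*
 * Indirect stream handler (capture/SCC node). The driver buffers are owned by
 * the HAL on this path, so a dequeued frame is only passed to the attached
 * substreams and then immediately re-queued to the driver; nothing is handed
 * back to the service directly from here.
 */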
4068 void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
4069 {
4070     uint32_t                currentSignal   = self->GetProcessingSignal();
4071     StreamThread *          selfThread      = ((StreamThread*)self);
4072     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4073     node_info_t             *currentNode    = selfStreamParms->node;
4074 
4075 
4076     if (currentSignal & SIGNAL_THREAD_RELEASE) {
4077         CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4078 
4079         if (selfThread->m_isBufferInit) {
4080             if (currentNode->fd == m_camera_info.capture.fd) {
4081                 if (m_camera_info.capture.status == true) {
4082                     ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4083                     selfThread->m_index, currentNode->fd);
4084                     if (cam_int_streamoff(currentNode) < 0 ){
4085                         ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4086                     } else {
4087                         m_camera_info.capture.status = false;
4088                     }
4089                 }
4090             } else {
4091                 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4092                 selfThread->m_index, currentNode->fd);
4093                 if (cam_int_streamoff(currentNode) < 0 ){
4094                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4095                 }
4096             }
4097             ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
4098             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
4099                     selfThread->m_index, currentNode->fd);
4100             currentNode->buffers = 0;
4101             cam_int_reqbufs(currentNode);
4102             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
4103                     selfThread->m_index, currentNode->fd);
4104         }
4105 
4106         selfThread->m_isBufferInit = false;
4107         selfThread->m_releasing = false;
4108         selfThread->m_activated = false;
4109         ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4110         return;
4111     }
4112 
4113     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
4114 #ifdef ENABLE_FRAME_SYNC
4115         camera2_stream *frame;
4116         uint8_t currentOutputStreams;
4117 #endif
4118         nsecs_t frameTimeStamp;
4119 
4120         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4121             __FUNCTION__,selfThread->m_index);
4122 
4123         m_streamBufferInit(self);
4124 
4125         ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
4126         selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
4127         ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
4128             selfThread->m_index, selfStreamParms->bufIndex);
4129 
4130 #ifdef ENABLE_FRAME_SYNC
4131         frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
4132         frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
4133         currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
4134         ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
4135 #else
4136         frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4137 #endif
4138 
4139         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4140             if (selfThread->m_attachedSubStreams[i].streamId == -1)
4141                 continue;
4142 #ifdef ENABLE_FRAME_SYNC
4143             if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4144                 m_requestManager->NotifyStreamOutput(frame->rcount);
4145                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4146                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4147             }
4148 #else
4149             if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4150                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4151                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4152             }
4153 #endif
4154         }
4155         cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4156         ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4157 
4158 
4159 
4160         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4161             __FUNCTION__, selfThread->m_index);
4162     }
4163 
4164 
4165     return;
4166 }
4167 
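/*
 * Common signal entry point for all stream threads; forwards to the direct or
 * indirect handler based on selfThread->streamType.
 */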
4168 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4169 {
4170     uint32_t                currentSignal   = self->GetProcessingSignal();
4171     StreamThread *          selfThread      = ((StreamThread*)self);
4172     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4173     node_info_t             *currentNode    = selfStreamParms->node;
4174 
4175     ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4176 
4177     // Runs in the stream's child thread context.
4178     // TODO: move these handlers into classes derived from StreamThread to support dynamic stream allocation.
4179     if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4180         m_streamFunc_direct(self);
4181     } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4182         m_streamFunc_indirect(self);
4183     }
4184 
4185     return;
4186 }
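/*
 * JPEG substream path: pick a free service buffer (status ON_HAL), crop/scale
 * the source frame into m_resizeBuf as NV16 with the picture CSC
 * (m_exynosPictureCSC), encode it with yuv2Jpeg(), and append the
 * camera2_jpeg_blob trailer at the very end of the gralloc buffer
 * (jpegBufSize - sizeof(camera2_jpeg_blob)) so the framework can locate the
 * encoded size. As a rough illustration only (the 1920x1080 numbers are not
 * taken from this file), the formula in m_streamBufferInit() reserves
 * ALIGN(1920,16) * ALIGN(1080,16) * 2 = 1920 * 1088 * 2 bytes (about 4 MB)
 * for m_resizeBuf.
 */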
4187 int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4188 {
4189     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4190     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
4191     status_t    res;
4192     ExynosRect jpegRect;
4193     bool found = false;
4194     int srcW, srcH, srcCropX, srcCropY;
4195     int pictureW, pictureH, pictureFramesize = 0;
4196     int pictureFormat;
4197     int cropX, cropY, cropW, cropH = 0;
4198     ExynosBuffer resizeBufInfo;
4199     ExynosRect   m_jpegPictureRect;
4200     buffer_handle_t * buf = NULL;
4201     camera2_jpeg_blob * jpegBlob = NULL;
4202     int jpegBufSize = 0;
4203 
4204     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4205     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4206         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4207             found = true;
4208             break;
4209         }
4210         subParms->svcBufIndex++;
4211         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4212             subParms->svcBufIndex = 0;
4213     }
4214     if (!found) {
4215         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4216         subParms->svcBufIndex++;
4217         return 1;
4218     }
4219 
4220     {
4221         Mutex::Autolock lock(m_jpegEncoderLock);
4222         m_jpegEncodingCount++;
4223     }
4224 
4225     m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4226                     m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4227                     &srcCropX, &srcCropY,
4228                     &srcW, &srcH,
4229                     0);
4230 
4231     m_jpegPictureRect.w = subParms->width;
4232     m_jpegPictureRect.h = subParms->height;
4233 
4234     ALOGV("DEBUG(%s): stream w/h = %d/%d, jpeg w/h = %d/%d",
4235               __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4236                    m_jpegPictureRect.w, m_jpegPictureRect.h);
4237 
4238     m_getRatioSize(srcW, srcH,
4239                    m_jpegPictureRect.w, m_jpegPictureRect.h,
4240                    &cropX, &cropY,
4241                    &pictureW, &pictureH,
4242                    0);
4243     pictureFormat = V4L2_PIX_FMT_YUYV;
4244     pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4245 
4246     if (m_exynosPictureCSC) {
4247         float zoom_w = 0, zoom_h = 0;
4248         if (m_zoomRatio == 0)
4249             m_zoomRatio = 1;
4250 
4251         if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4252             zoom_w =  pictureW / m_zoomRatio;
4253             zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4254         } else {
4255             zoom_h = pictureH / m_zoomRatio;
4256             zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4257         }
4258         cropX = (srcW - zoom_w) / 2;
4259         cropY = (srcH - zoom_h) / 2;
4260         cropW = zoom_w;
4261         cropH = zoom_h;
4262 
4263         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4264               __FUNCTION__, cropX, cropY, cropW, cropH);
4265 
4266         csc_set_src_format(m_exynosPictureCSC,
4267                            ALIGN(srcW, 16), ALIGN(srcH, 16),
4268                            cropX, cropY, cropW, cropH,
4269                            V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4270                            0);
4271 
4272         csc_set_dst_format(m_exynosPictureCSC,
4273                            m_jpegPictureRect.w, m_jpegPictureRect.h,
4274                            0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4275                            V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4276                            0);
4277         for (int i = 0 ; i < 3 ; i++)
4278             ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
4279                 __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4280         csc_set_src_buffer(m_exynosPictureCSC,
4281                            (void **)&srcImageBuf->fd.fd);
4282 
4283         csc_set_dst_buffer(m_exynosPictureCSC,
4284                            (void **)&m_resizeBuf.fd.fd);
4285         for (int i = 0 ; i < 3 ; i++)
4286             ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4287                 __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4288 
4289         if (csc_convert(m_exynosPictureCSC) != 0)
4290             ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4291 
4292     }
4293     else {
4294         ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4295     }
4296 
4297     resizeBufInfo = m_resizeBuf;
4298 
4299     m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4300 
4301     for (int i = 1; i < 3; i++) {
4302         if (m_resizeBuf.size.extS[i] != 0)
4303             m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4304 
4305         ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4306     }
4307 
4308     jpegRect.w = m_jpegPictureRect.w;
4309     jpegRect.h = m_jpegPictureRect.h;
4310     jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4311 
4312     for (int j = 0 ; j < 3 ; j++)
4313         ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
4314             __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4315             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4316             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4317 
4318     jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
4319     if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
4320         ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4321     } else {
4322         m_resizeBuf = resizeBufInfo;
4323 
4324         int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
4325         ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
4326             m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
4327         char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4328         jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
4329 
4330         if (jpegBuffer[jpegSize-1] == 0)
4331             jpegSize--;
4332         jpegBlob->jpeg_size = jpegSize;
4333         jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
4334     }
4335     subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
4336     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4337 
4338     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4339             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4340     if (res == 0) {
4341         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4342         subParms->numSvcBufsInHal--;
4343     }
4344     else {
4345         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4346     }
4347 
4348     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4349     {
4350         bool found = false;
4351         int checkingIndex = 0;
4352 
4353         ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4354 
4355         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4356         if (res != NO_ERROR || buf == NULL) {
4357             ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4358             break;
4359         }
4360         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4361         subParms->numSvcBufsInHal ++;
4362         ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4363            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4364 
4365 
4366         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4367             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4368                 found = true;
4369                 break;
4370             }
4371         }
4372         ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found);
4373 
4374         if (!found) {
4375              break;
4376         }
4377 
4378         subParms->svcBufIndex = checkingIndex;
4379         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4380             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4381         }
4382         else {
4383             ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4384                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4385         }
4386     }
4387     {
4388         Mutex::Autolock lock(m_jpegEncoderLock);
4389         m_jpegEncodingCount--;
4390     }
4391     return 0;
4392 }
4393 
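/*
 * Video record substream path: color-convert/scale the preview frame into the
 * record stream's service buffer with the video CSC (m_exynosVideoCSC),
 * enqueue it with the frame timestamp, then top the HAL-side buffer pool back
 * up from the service queue.
 */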
4394 int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4395 {
4396     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4397     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
4398     status_t    res;
4399     ExynosRect jpegRect;
4400     bool found = false;
4401     int cropX, cropY, cropW, cropH = 0;
4402     buffer_handle_t * buf = NULL;
4403 
4404     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4405     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4406         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4407             found = true;
4408             break;
4409         }
4410         subParms->svcBufIndex++;
4411         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4412             subParms->svcBufIndex = 0;
4413     }
4414     if (!found) {
4415         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4416         subParms->svcBufIndex++;
4417         return 1;
4418     }
4419 
4420     if (m_exynosVideoCSC) {
4421         int videoW = subParms->width, videoH = subParms->height;
4422         int cropX, cropY, cropW, cropH = 0;
4423         int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4424         m_getRatioSize(previewW, previewH,
4425                        videoW, videoH,
4426                        &cropX, &cropY,
4427                        &cropW, &cropH,
4428                        0);
4429 
4430         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4431                  __FUNCTION__, cropX, cropY, cropW, cropH);
4432 
4433         csc_set_src_format(m_exynosVideoCSC,
4434                            ALIGN(previewW, 32), previewH,
4435                            cropX, cropY, cropW, cropH,
4436                            selfStreamParms->format,
4437                            0);
4438 
4439         csc_set_dst_format(m_exynosVideoCSC,
4440                            videoW, videoH,
4441                            0, 0, videoW, videoH,
4442                            subParms->format,
4443                            1);
4444 
4445         csc_set_src_buffer(m_exynosVideoCSC,
4446                         (void **)&srcImageBuf->fd.fd);
4447 
4448         csc_set_dst_buffer(m_exynosVideoCSC,
4449             (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4450 
4451         if (csc_convert(m_exynosVideoCSC) != 0) {
4452             ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4453         }
4454         else {
4455             ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4456         }
4457     }
4458     else {
4459         ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4460     }
4461 
4462     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4463 
4464     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4465             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4466     if (res == 0) {
4467         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4468         subParms->numSvcBufsInHal--;
4469     }
4470     else {
4471         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4472     }
4473 
4474     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4475     {
4476         bool found = false;
4477         int checkingIndex = 0;
4478 
4479         ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4480 
4481         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4482         if (res != NO_ERROR || buf == NULL) {
4483             ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4484             break;
4485         }
4486         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4487         subParms->numSvcBufsInHal ++;
4488         ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4489            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4490 
4491         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4492             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4493                 found = true;
4494                 break;
4495             }
4496         }
4497         ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4498 
4499         if (!found) {
4500              break;
4501         }
4502 
4503         subParms->svcBufIndex = checkingIndex;
4504         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4505             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4506         }
4507         else {
4508             ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4509                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4510         }
4511     }
4512     return 0;
4513 }
4514 
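/*
 * Preview callback substream path. For HAL_PIXEL_FORMAT_YCrCb_420_SP the frame
 * is converted through the video CSC into m_previewCbBuf and then packed into
 * the service buffer; for HAL_PIXEL_FORMAT_YV12 the planes are copied directly
 * using ALIGN(w,16) luma and ALIGN(stride/2,16) chroma strides, falling back
 * to a line-by-line copy when the width is not already 32-aligned.
 */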
4515 int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4516 {
4517     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4518     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4519     status_t    res;
4520     bool found = false;
4521     int cropX, cropY, cropW, cropH = 0;
4522     buffer_handle_t * buf = NULL;
4523 
4524     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4525     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4526         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4527             found = true;
4528             break;
4529         }
4530         subParms->svcBufIndex++;
4531         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4532             subParms->svcBufIndex = 0;
4533     }
4534     if (!found) {
4535         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4536         subParms->svcBufIndex++;
4537         return 1;
4538     }
4539 
4540     if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4541         if (m_exynosVideoCSC) {
4542             int previewCbW = subParms->width, previewCbH = subParms->height;
4543             int cropX, cropY, cropW, cropH = 0;
4544             int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4545             m_getRatioSize(previewW, previewH,
4546                            previewCbW, previewCbH,
4547                            &cropX, &cropY,
4548                            &cropW, &cropH,
4549                            0);
4550 
4551             ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4552                      __FUNCTION__, cropX, cropY, cropW, cropH);
4553             csc_set_src_format(m_exynosVideoCSC,
4554                                ALIGN(previewW, 32), previewH,
4555                                cropX, cropY, cropW, cropH,
4556                                selfStreamParms->format,
4557                                0);
4558 
4559             csc_set_dst_format(m_exynosVideoCSC,
4560                                previewCbW, previewCbH,
4561                                0, 0, previewCbW, previewCbH,
4562                                subParms->internalFormat,
4563                                1);
4564 
4565             csc_set_src_buffer(m_exynosVideoCSC,
4566                         (void **)&srcImageBuf->fd.fd);
4567 
4568             csc_set_dst_buffer(m_exynosVideoCSC,
4569                 (void **)(&(m_previewCbBuf.fd.fd)));
4570 
4571             if (csc_convert(m_exynosVideoCSC) != 0) {
4572                 ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4573             }
4574             else {
4575                 ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4576             }
4577             if (previewCbW == ALIGN(previewCbW, 16)) {
4578                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4579                     m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4580                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4581                     m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4582             }
4583             else {
4584                 // TODO : copy line by line ?
4585             }
4586         }
4587         else {
4588             ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4589         }
4590     }
4591     else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4592         int previewCbW = subParms->width, previewCbH = subParms->height;
4593         int stride = ALIGN(previewCbW, 16);
4594         int uv_stride = ALIGN(previewCbW/2, 16);
4595         int c_stride = ALIGN(stride / 2, 16);
4596 
4597         if (previewCbW == ALIGN(previewCbW, 32)) {
4598             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4599                 srcImageBuf->virt.extP[0], stride * previewCbH);
4600             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4601                 srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4602             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4603                 srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4604         } else {
4605             char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4606             char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
4607             for (int i = 0 ; i < previewCbH ; i++) {
4608                 memcpy(dstAddr, srcAddr, previewCbW);
4609                 dstAddr += stride;
4610                 srcAddr += ALIGN(stride, 32);
4611             }
4612             dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
4613             srcAddr = (char *)(srcImageBuf->virt.extP[1]);
4614             for (int i = 0 ; i < previewCbH/2 ; i++) {
4615                 memcpy(dstAddr, srcAddr, previewCbW/2);
4616                 dstAddr += c_stride;
4617                 srcAddr += uv_stride;
4618             }
4619             srcAddr = (char *)(srcImageBuf->virt.extP[2]);
4620             for (int i = 0 ; i < previewCbH/2 ; i++) {
4621                 memcpy(dstAddr, srcAddr, previewCbW/2);
4622                 dstAddr += c_stride;
4623                 srcAddr += uv_stride;
4624             }
4625         }
4626     }
4627     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4628 
4629     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4630             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4631     if (res == 0) {
4632         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4633         subParms->numSvcBufsInHal--;
4634     }
4635     else {
4636         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4637     }
4638 
4639     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4640     {
4641         bool found = false;
4642         int checkingIndex = 0;
4643 
4644         ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4645 
4646         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4647         if (res != NO_ERROR || buf == NULL) {
4648             ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4649             break;
4650         }
4651         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4652         subParms->numSvcBufsInHal ++;
4653         ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4654            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4655 
4656 
4657         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4658             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4659                 found = true;
4660                 break;
4661             }
4662         }
4663         ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4664 
4665         if (!found) {
4666              break;
4667         }
4668 
4669         subParms->svcBufIndex = checkingIndex;
4670         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4671             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4672         }
4673         else {
4674             ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4675                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4676         }
4677     }
4678     return 0;
4679 }
4680 
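/*
 * Returns true only if (w, h) is one of the thumbnail sizes advertised for the
 * current sensor (SUPPORT_THUMBNAIL_REAR_SIZE for camera 0, otherwise
 * SUPPORT_THUMBNAIL_FRONT_SIZE). An unsupported request disables the EXIF
 * thumbnail instead of failing the capture.
 */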
4681 bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4682 {
4683     int sizeOfSupportList;
4684 
4685     //REAR Camera
4686     if(this->getCameraId() == 0) {
4687         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int32_t)*2);
4688 
4689         for(int i = 0; i < sizeOfSupportList; i++) {
4690             if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4691                 return true;
4692         }
4693 
4694     }
4695     else {
4696         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int32_t)*2);
4697 
4698         for(int i = 0; i < sizeOfSupportList; i++) {
4699             if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4700                 return true;
4701         }
4702     }
4703 
4704     return false;
4705 }
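/*
 * YUV-to-JPEG helper built on ExynosJpegEncoderForCamera: configure quality,
 * size, color format, thumbnail (validated with m_checkThumbnailSize) and EXIF
 * from m_jpegMetadata, point the encoder at the fds of the source and
 * destination buffers, and run the encode. Returns false on any configuration
 * or encode failure.
 */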
4706 bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4707                             ExynosBuffer *jpegBuf,
4708                             ExynosRect *rect)
4709 {
4710     unsigned char *addr;
4711 
4712     ExynosJpegEncoderForCamera jpegEnc;
4713     bool ret = false;
4714     int res = 0;
4715 
4716     unsigned int *yuvSize = yuvBuf->size.extS;
4717 
4718     if (jpegEnc.create()) {
4719         ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4720         goto jpeg_encode_done;
4721     }
4722 
4723     if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
4724         ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4725         goto jpeg_encode_done;
4726     }
4727 
4728     if (jpegEnc.setSize(rect->w, rect->h)) {
4729         ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4730         goto jpeg_encode_done;
4731     }
4732     ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4733 
4734     if (jpegEnc.setColorFormat(rect->colorFormat)) {
4735         ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4736         goto jpeg_encode_done;
4737     }
4738 
4739     if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4740         ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4741         goto jpeg_encode_done;
4742     }
4743 
4744     if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
4745         mExifInfo.enableThumb = true;
4746         if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
4747             // in the case of unsupported parameter, disable thumbnail
4748             mExifInfo.enableThumb = false;
4749         } else {
4750             m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
4751             m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
4752         }
4753 
4754         ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4755 
4756     } else {
4757         mExifInfo.enableThumb = false;
4758     }
4759 
4760     if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4761         ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH);
4762         goto jpeg_encode_done;
4763     }
4764 
4765     ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW);
4766     if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
4767         ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4768         goto jpeg_encode_done;
4769     }
4770 
4771     m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4772     ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4773     if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4774         ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4775         goto jpeg_encode_done;
4776     }
4777     if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4778         ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4779         goto jpeg_encode_done;
4780     }
4781 
4782     if (jpegEnc.updateConfig()) {
4783         ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4784         goto jpeg_encode_done;
4785     }
4786 
4787     if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo))) {
4788         ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4789         goto jpeg_encode_done;
4790     }
4791 
4792     ret = true;
4793 
4794 jpeg_encode_done:
4795 
4796     if (jpegEnc.flagCreate() == true)
4797         jpegEnc.destroy();
4798 
4799     return ret;
4800 }
4801 
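/*
 * AE precapture trigger from the framework. On the rear camera with an
 * auto/always flash AE mode this arms the full flash sequence
 * (IS_FLASH_STATE_ON) unless the AF-assist flash has already run; otherwise
 * precapture does not drive the flash and only the notification path runs.
 */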
4802 void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4803 {
4804     m_ctlInfo.flash.m_precaptureTriggerId = id;
4805     m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4806     if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4807         // flash is required
4808         switch (m_ctlInfo.flash.m_flashCnt) {
4809         case IS_FLASH_STATE_AUTO_DONE:
4810         case IS_FLASH_STATE_AUTO_OFF:
4811             // Flash capture sequence: the AF-assist flash has already run
4812             break;
4813         default:
4814             // Full flash sequence
4815             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4816             m_ctlInfo.flash.m_flashEnableFlg = true;
4817             m_ctlInfo.flash.m_flashTimeOut = 0;
4818         }
4819     } else {
4820         // Skip pre-capture in case of non-flash.
4821         ALOGV("[PreCap] Flash OFF mode ");
4822         m_ctlInfo.flash.m_flashEnableFlg = false;
4823         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4824     }
4825     ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4826     OnPrecaptureMeteringNotificationSensor();
4827 }
4828 
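/*
 * AF trigger from the framework, dispatched by the current AF mode. For the
 * one-shot modes (AUTO/MACRO/MANUAL) an AF-assist flash is started first when
 * the AE mode requests flash on the rear camera; the continuous modes have
 * their own trigger state machines below.
 */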
4829 void ExynosCameraHWInterface2::OnAfTrigger(int id)
4830 {
4831     m_afTriggerId = id;
4832 
4833     switch (m_afMode) {
4834     case AA_AFMODE_AUTO:
4835     case AA_AFMODE_MACRO:
4836     case AA_AFMODE_MANUAL:
4837         ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4838         // If flash is enabled, the flash operation runs before the AF trigger
4839         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4840                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4841                 && (m_cameraId == 0)) {
4842             ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4843             m_ctlInfo.flash.m_flashEnableFlg = true;
4844             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4845             m_ctlInfo.flash.m_flashDecisionResult = false;
4846             m_ctlInfo.flash.m_afFlashDoneFlg = true;
4847         }
4848         OnAfTriggerAutoMacro(id);
4849         break;
4850     case AA_AFMODE_CONTINUOUS_VIDEO:
4851         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4852         OnAfTriggerCAFVideo(id);
4853         break;
4854     case AA_AFMODE_CONTINUOUS_PICTURE:
4855         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4856         OnAfTriggerCAFPicture(id);
4857         break;
4858 
4859     case AA_AFMODE_OFF:
4860     default:
4861         break;
4862     }
4863 }
4864 
4865 void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int /*id*/)
4866 {
4867     int nextState = NO_TRANSITION;
4868 
4869     switch (m_afState) {
4870     case HAL_AFSTATE_INACTIVE:
4871     case HAL_AFSTATE_PASSIVE_FOCUSED:
4872     case HAL_AFSTATE_SCANNING:
4873         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4874         m_IsAfTriggerRequired = true;
4875         break;
4876     case HAL_AFSTATE_NEEDS_COMMAND:
4877         nextState = NO_TRANSITION;
4878         break;
4879     case HAL_AFSTATE_STARTED:
4880         nextState = NO_TRANSITION;
4881         break;
4882     case HAL_AFSTATE_LOCKED:
4883         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4884         m_IsAfTriggerRequired = true;
4885         break;
4886     case HAL_AFSTATE_FAILED:
4887         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4888         m_IsAfTriggerRequired = true;
4889         break;
4890     default:
4891         break;
4892     }
4893     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4894     if (nextState != NO_TRANSITION)
4895         m_afState = nextState;
4896 }
4897 
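/*
 * Trigger handling for continuous-picture AF: a trigger while the scan is
 * still running defers the answer until the hardware reports a result
 * (HAL_AFSTATE_NEEDS_DETERMINATION), while a trigger on an already
 * passively-focused frame locks immediately and reports FOCUSED_LOCKED or
 * NOT_FOCUSED_LOCKED based on the last scan result.
 */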
4898 void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4899 {
4900     int nextState = NO_TRANSITION;
4901 
4902     switch (m_afState) {
4903     case HAL_AFSTATE_INACTIVE:
4904         nextState = HAL_AFSTATE_FAILED;
4905         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4906         break;
4907     case HAL_AFSTATE_NEEDS_COMMAND:
4908         // not used
4909         break;
4910     case HAL_AFSTATE_STARTED:
4911         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4912         m_AfHwStateFailed = false;
4913         break;
4914     case HAL_AFSTATE_SCANNING:
4915         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4916         m_AfHwStateFailed = false;
4917         // If flash is enabled, the flash operation is executed before triggering AF
4918         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4919                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4920                 && (m_cameraId == 0)) {
4921             ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4922             m_ctlInfo.flash.m_flashEnableFlg = true;
4923             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4924             m_ctlInfo.flash.m_flashDecisionResult = false;
4925             m_ctlInfo.flash.m_afFlashDoneFlg = true;
4926         }
4927         break;
4928     case HAL_AFSTATE_NEEDS_DETERMINATION:
4929         nextState = NO_TRANSITION;
4930         break;
4931     case HAL_AFSTATE_PASSIVE_FOCUSED:
4932         m_IsAfLockRequired = true;
4933         if (m_AfHwStateFailed) {
4934             ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4935             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4936             nextState = HAL_AFSTATE_FAILED;
4937         }
4938         else {
4939             ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4940             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4941             nextState = HAL_AFSTATE_LOCKED;
4942         }
4943         m_AfHwStateFailed = false;
4944         break;
4945     case HAL_AFSTATE_LOCKED:
4946         nextState = NO_TRANSITION;
4947         break;
4948     case HAL_AFSTATE_FAILED:
4949         nextState = NO_TRANSITION;
4950         break;
4951     default:
4952         break;
4953     }
4954     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4955     if (nextState != NO_TRANSITION)
4956         m_afState = nextState;
4957 }
4958 
4959 
4960 void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int /*id*/)
4961 {
4962     int nextState = NO_TRANSITION;
4963 
4964     switch (m_afState) {
4965     case HAL_AFSTATE_INACTIVE:
4966         nextState = HAL_AFSTATE_FAILED;
4967         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4968         break;
4969     case HAL_AFSTATE_NEEDS_COMMAND:
4970         // not used
4971         break;
4972     case HAL_AFSTATE_STARTED:
4973         m_IsAfLockRequired = true;
4974         nextState = HAL_AFSTATE_FAILED;
4975         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4976         break;
4977     case HAL_AFSTATE_SCANNING:
4978         m_IsAfLockRequired = true;
4979         nextState = HAL_AFSTATE_FAILED;
4980         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4981         break;
4982     case HAL_AFSTATE_NEEDS_DETERMINATION:
4983         // not used
4984         break;
4985     case HAL_AFSTATE_PASSIVE_FOCUSED:
4986         m_IsAfLockRequired = true;
4987         SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4988         nextState = HAL_AFSTATE_LOCKED;
4989         break;
4990     case HAL_AFSTATE_LOCKED:
4991         nextState = NO_TRANSITION;
4992         break;
4993     case HAL_AFSTATE_FAILED:
4994         nextState = NO_TRANSITION;
4995         break;
4996     default:
4997         break;
4998     }
4999     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5000     if (nextState != NO_TRANSITION)
5001         m_afState = nextState;
5002 }
5003 
5004 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
5005 {
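    // Sensor-side notification path: report AE PRECAPTURE (plus a converged AWB)
    // to the framework once per pre-capture trigger.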
5006     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
5007         // Just notify the start of pre-capture
5008         if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
5009             m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5010                         ANDROID_CONTROL_AE_STATE_PRECAPTURE,
5011                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5012             ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5013             m_notifyCb(CAMERA2_MSG_AUTOWB,
5014                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
5015                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5016             m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
5017         }
5018     }
5019 }
5020 
5021 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
5022 {
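    // ISP-side notification path. With flash enabled the AE state follows the flash
    // state machine: once the auto flash sequence is done, a pending PRECAPTURE is
    // completed with CONVERGED and the trigger is cleared; unexpected capture states
    // are forced to converge. Without flash, a reported PRECAPTURE is simply
    // completed with CONVERGED.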
5023     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
5024         if (m_ctlInfo.flash.m_flashEnableFlg) {
5025             // flash case
5026             switch (m_ctlInfo.flash.m_flashCnt) {
5027             case IS_FLASH_STATE_AUTO_DONE:
5028             case IS_FLASH_STATE_AUTO_OFF:
5029                 if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5030                     // End notification
5031                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5032                                     ANDROID_CONTROL_AE_STATE_CONVERGED,
5033                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5034                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5035                     m_notifyCb(CAMERA2_MSG_AUTOWB,
5036                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
5037                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5038                     m_ctlInfo.flash.m_precaptureTriggerId = 0;
5039                 } else {
5040                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5041                                     ANDROID_CONTROL_AE_STATE_PRECAPTURE,
5042                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5043                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5044                     m_notifyCb(CAMERA2_MSG_AUTOWB,
5045                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
5046                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5047                     m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
5048                 }
5049                 break;
5050             case IS_FLASH_STATE_CAPTURE:
5051             case IS_FLASH_STATE_CAPTURE_WAIT:
5052             case IS_FLASH_STATE_CAPTURE_JPEG:
5053             case IS_FLASH_STATE_CAPTURE_END:
5054                 ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
5055                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
5056                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5057                         ANDROID_CONTROL_AE_STATE_CONVERGED,
5058                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5059                 m_notifyCb(CAMERA2_MSG_AUTOWB,
5060                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
5061                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5062                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5063                 break;
5064             }
5065         } else {
5066             // non-flash case
5067             if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5068                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5069                                 ANDROID_CONTROL_AE_STATE_CONVERGED,
5070                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5071                 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5072                 m_notifyCb(CAMERA2_MSG_AUTOWB,
5073                                 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5074                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5075                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5076             }
5077         }
5078     }
5079 }
5080 
5081 void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
5082 {
5083     switch (m_afMode) {
5084     case AA_AFMODE_AUTO:
5085     case AA_AFMODE_MACRO:
5086         OnAfNotificationAutoMacro(noti);
5087         break;
5088     case AA_AFMODE_CONTINUOUS_VIDEO:
5089         OnAfNotificationCAFVideo(noti);
5090         break;
5091     case AA_AFMODE_CONTINUOUS_PICTURE:
5092         OnAfNotificationCAFPicture(noti);
5093         break;
5094     case AA_AFMODE_OFF:
5095     default:
5096         break;
5097     }
5098 }
5099 
5100 void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
5101 {
5102     int nextState = NO_TRANSITION;
5103     bool bWrongTransition = false;
5104 
5105     if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
5106         switch (noti) {
5107         case AA_AFSTATE_INACTIVE:
5108         case AA_AFSTATE_ACTIVE_SCAN:
5109         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5110         case AA_AFSTATE_AF_FAILED_FOCUS:
5111         default:
5112             nextState = NO_TRANSITION;
5113             break;
5114         }
5115     }
5116     else if (m_afState == HAL_AFSTATE_STARTED) {
5117         switch (noti) {
5118         case AA_AFSTATE_INACTIVE:
5119             nextState = NO_TRANSITION;
5120             break;
5121         case AA_AFSTATE_ACTIVE_SCAN:
5122             nextState = HAL_AFSTATE_SCANNING;
5123             SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
5124             break;
5125         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5126             nextState = NO_TRANSITION;
5127             break;
5128         case AA_AFSTATE_AF_FAILED_FOCUS:
5129             nextState = NO_TRANSITION;
5130             break;
5131         default:
5132             bWrongTransition = true;
5133             break;
5134         }
5135     }
5136     else if (m_afState == HAL_AFSTATE_SCANNING) {
5137         switch (noti) {
5138         case AA_AFSTATE_INACTIVE:
5139             bWrongTransition = true;
5140             break;
5141         case AA_AFSTATE_ACTIVE_SCAN:
5142             nextState = NO_TRANSITION;
5143             break;
5144         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5145             // If flash mode is enabled, execute pre-capture metering after AF
5146             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5147                 switch (m_ctlInfo.flash.m_flashCnt) {
5148                 case IS_FLASH_STATE_ON_DONE:
5149                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5150                     nextState = NO_TRANSITION;
5151                     break;
5152                 case IS_FLASH_STATE_AUTO_DONE:
5153                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5154                     nextState = HAL_AFSTATE_LOCKED;
5155                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5156                     break;
5157                 default:
5158                     nextState = NO_TRANSITION;
5159                 }
5160             } else {
5161                 nextState = HAL_AFSTATE_LOCKED;
5162                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5163             }
5164             break;
5165         case AA_AFSTATE_AF_FAILED_FOCUS:
5166             // If flash mode is enabled, execute pre-capture metering after AF
5167             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5168                 switch (m_ctlInfo.flash.m_flashCnt) {
5169                 case IS_FLASH_STATE_ON_DONE:
5170                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5171                     nextState = NO_TRANSITION;
5172                     break;
5173                 case IS_FLASH_STATE_AUTO_DONE:
5174                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5175                     nextState = HAL_AFSTATE_FAILED;
5176                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5177                     break;
5178                 default:
5179                     nextState = NO_TRANSITION;
5180                 }
5181             } else {
5182                 nextState = HAL_AFSTATE_FAILED;
5183                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5184             }
5185             break;
5186         default:
5187             bWrongTransition = true;
5188             break;
5189         }
5190     }
5191     else if (m_afState == HAL_AFSTATE_LOCKED) {
5192         switch (noti) {
5193             case AA_AFSTATE_INACTIVE:
5194             case AA_AFSTATE_ACTIVE_SCAN:
5195                 bWrongTransition = true;
5196                 break;
5197             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5198                 nextState = NO_TRANSITION;
5199                 break;
5200             case AA_AFSTATE_AF_FAILED_FOCUS:
5201             default:
5202                 bWrongTransition = true;
5203                 break;
5204         }
5205     }
5206     else if (m_afState == HAL_AFSTATE_FAILED) {
5207         switch (noti) {
5208             case AA_AFSTATE_INACTIVE:
5209             case AA_AFSTATE_ACTIVE_SCAN:
5210             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5211                 bWrongTransition = true;
5212                 break;
5213             case AA_AFSTATE_AF_FAILED_FOCUS:
5214                 nextState = NO_TRANSITION;
5215                 break;
5216             default:
5217                 bWrongTransition = true;
5218                 break;
5219         }
5220     }
5221     if (bWrongTransition) {
5222         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5223         return;
5224     }
5225     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5226     if (nextState != NO_TRANSITION)
5227         m_afState = nextState;
5228 }
5229 
5230 void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5231 {
5232     int nextState = NO_TRANSITION;
5233     bool bWrongTransition = false;
5234 
5235     if (m_afState == HAL_AFSTATE_INACTIVE) {
5236         switch (noti) {
5237         case AA_AFSTATE_INACTIVE:
5238         case AA_AFSTATE_ACTIVE_SCAN:
5239         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5240         case AA_AFSTATE_AF_FAILED_FOCUS:
5241         default:
5242             nextState = NO_TRANSITION;
5243             break;
5244         }
5245         // Check AF notification after triggering
5246         if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
5247             if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
5248                 ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode);
5249                 SetAfMode(AA_AFMODE_OFF);
5250                 SetAfMode(m_afMode);
5251                 m_ctlInfo.af.m_afTriggerTimeOut = 0;
5252             } else {
5253                 m_ctlInfo.af.m_afTriggerTimeOut++;
5254             }
5255         }
5256     }
5257     else if (m_afState == HAL_AFSTATE_STARTED) {
5258         switch (noti) {
5259         case AA_AFSTATE_INACTIVE:
5260             nextState = NO_TRANSITION;
5261             break;
5262         case AA_AFSTATE_ACTIVE_SCAN:
5263             nextState = HAL_AFSTATE_SCANNING;
5264             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5265             m_ctlInfo.af.m_afTriggerTimeOut = 0;
5266             break;
5267         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5268             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5269             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5270             m_ctlInfo.af.m_afTriggerTimeOut = 0;
5271             break;
5272         case AA_AFSTATE_AF_FAILED_FOCUS:
5273             //nextState = HAL_AFSTATE_FAILED;
5274             //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5275             nextState = NO_TRANSITION;
5276             break;
5277         default:
5278             bWrongTransition = true;
5279             break;
5280         }
5281     }
5282     else if (m_afState == HAL_AFSTATE_SCANNING) {
5283         switch (noti) {
5284         case AA_AFSTATE_INACTIVE:
5285             nextState = NO_TRANSITION;
5286             break;
5287         case AA_AFSTATE_ACTIVE_SCAN:
5288             nextState = NO_TRANSITION;
5289             m_AfHwStateFailed = false;
5290             break;
5291         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5292             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5293             m_AfHwStateFailed = false;
5294             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5295             break;
5296         case AA_AFSTATE_AF_FAILED_FOCUS:
5297             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5298             m_AfHwStateFailed = true;
5299             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5300             break;
5301         default:
5302             bWrongTransition = true;
5303             break;
5304         }
5305     }
5306     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5307         switch (noti) {
5308         case AA_AFSTATE_INACTIVE:
5309             nextState = NO_TRANSITION;
5310             break;
5311         case AA_AFSTATE_ACTIVE_SCAN:
5312             nextState = HAL_AFSTATE_SCANNING;
5313             m_AfHwStateFailed = false;
5314             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5315             break;
5316         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5317             nextState = NO_TRANSITION;
5318             m_AfHwStateFailed = false;
5319             break;
5320         case AA_AFSTATE_AF_FAILED_FOCUS:
5321             nextState = NO_TRANSITION;
5322             m_AfHwStateFailed = true;
5323             break;
5324         default:
5325             bWrongTransition = true;
5326             break;
5327         }
5328     }
5329     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5330         // Skip notification in the flash case; wait until the flash-on stage completes
5331         if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5332             if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5333                 return;
5334         }
5335         switch (noti) {
5336         case AA_AFSTATE_INACTIVE:
5337             nextState = NO_TRANSITION;
5338             break;
5339         case AA_AFSTATE_ACTIVE_SCAN:
5340             nextState = NO_TRANSITION;
5341             break;
5342         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5343             // If flash mode is enabled, execute pre-capture metering after AF
5344             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5345                 switch (m_ctlInfo.flash.m_flashCnt) {
5346                 case IS_FLASH_STATE_ON_DONE:
5347                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5348                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5349                     nextState = NO_TRANSITION;
5350                     break;
5351                 case IS_FLASH_STATE_AUTO_DONE:
5352                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5353                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5354                     m_IsAfLockRequired = true;
5355                     nextState = HAL_AFSTATE_LOCKED;
5356                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5357                     break;
5358                 default:
5359                     nextState = NO_TRANSITION;
5360                 }
5361             } else {
5362                 m_IsAfLockRequired = true;
5363                 nextState = HAL_AFSTATE_LOCKED;
5364                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5365             }
5366             break;
5367         case AA_AFSTATE_AF_FAILED_FOCUS:
5368             // If flash mode is enabled, execute pre-capture metering after AF
5369             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5370                 switch (m_ctlInfo.flash.m_flashCnt) {
5371                 case IS_FLASH_STATE_ON_DONE:
5372                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5373                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5374                     nextState = NO_TRANSITION;
5375                     break;
5376                 case IS_FLASH_STATE_AUTO_DONE:
5377                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5378                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5379                     m_IsAfLockRequired = true;
5380                     nextState = HAL_AFSTATE_FAILED;
5381                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5382                     break;
5383                 default:
5384                     nextState = NO_TRANSITION;
5385                 }
5386             } else {
5387                 m_IsAfLockRequired = true;
5388                 nextState = HAL_AFSTATE_FAILED;
5389                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5390             }
5391             break;
5392         default:
5393             bWrongTransition = true;
5394             break;
5395         }
5396     }
5397     else if (m_afState == HAL_AFSTATE_LOCKED) {
5398         switch (noti) {
5399             case AA_AFSTATE_INACTIVE:
5400                 nextState = NO_TRANSITION;
5401                 break;
5402             case AA_AFSTATE_ACTIVE_SCAN:
5403                 bWrongTransition = true;
5404                 break;
5405             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5406                 nextState = NO_TRANSITION;
5407                 break;
5408             case AA_AFSTATE_AF_FAILED_FOCUS:
5409             default:
5410                 bWrongTransition = true;
5411                 break;
5412         }
5413     }
5414     else if (m_afState == HAL_AFSTATE_FAILED) {
5415         switch (noti) {
5416             case AA_AFSTATE_INACTIVE:
5417                 bWrongTransition = true;
5418                 break;
5419             case AA_AFSTATE_ACTIVE_SCAN:
5420                 nextState = HAL_AFSTATE_SCANNING;
5421                 break;
5422             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5423                 bWrongTransition = true;
5424                 break;
5425             case AA_AFSTATE_AF_FAILED_FOCUS:
5426                 nextState = NO_TRANSITION;
5427                 break;
5428             default:
5429                 bWrongTransition = true;
5430                 break;
5431         }
5432     }
5433     if (bWrongTransition) {
5434         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5435         return;
5436     }
5437     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5438     if (nextState != NO_TRANSITION)
5439         m_afState = nextState;
5440 }
5441 
5442 void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5443 {
5444     int nextState = NO_TRANSITION;
5445     bool bWrongTransition = false;
5446 
5447     if (m_afState == HAL_AFSTATE_INACTIVE) {
5448         switch (noti) {
5449         case AA_AFSTATE_INACTIVE:
5450         case AA_AFSTATE_ACTIVE_SCAN:
5451         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5452         case AA_AFSTATE_AF_FAILED_FOCUS:
5453         default:
5454             nextState = NO_TRANSITION;
5455             break;
5456         }
5457     }
5458     else if (m_afState == HAL_AFSTATE_STARTED) {
5459         switch (noti) {
5460         case AA_AFSTATE_INACTIVE:
5461             nextState = NO_TRANSITION;
5462             break;
5463         case AA_AFSTATE_ACTIVE_SCAN:
5464             nextState = HAL_AFSTATE_SCANNING;
5465             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5466             break;
5467         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5468             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5469             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5470             break;
5471         case AA_AFSTATE_AF_FAILED_FOCUS:
5472             nextState = HAL_AFSTATE_FAILED;
5473             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5474             break;
5475         default:
5476             bWrongTransition = true;
5477             break;
5478         }
5479     }
5480     else if (m_afState == HAL_AFSTATE_SCANNING) {
5481         switch (noti) {
5482         case AA_AFSTATE_INACTIVE:
5483             bWrongTransition = true;
5484             break;
5485         case AA_AFSTATE_ACTIVE_SCAN:
5486             nextState = NO_TRANSITION;
5487             break;
5488         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5489             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5490             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5491             break;
5492         case AA_AFSTATE_AF_FAILED_FOCUS:
5493             nextState = NO_TRANSITION;
5494             break;
5495         default:
5496             bWrongTransition = true;
5497             break;
5498         }
5499     }
5500     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5501         switch (noti) {
5502         case AA_AFSTATE_INACTIVE:
5503             bWrongTransition = true;
5504             break;
5505         case AA_AFSTATE_ACTIVE_SCAN:
5506             nextState = HAL_AFSTATE_SCANNING;
5507             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5508             break;
5509         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5510             nextState = NO_TRANSITION;
5511             break;
5512         case AA_AFSTATE_AF_FAILED_FOCUS:
5513             nextState = HAL_AFSTATE_FAILED;
5514             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5515             // TODO : needs NO_TRANSITION ?
5516             break;
5517         default:
5518             bWrongTransition = true;
5519             break;
5520         }
5521     }
5522     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5523         switch (noti) {
5524         case AA_AFSTATE_INACTIVE:
5525             bWrongTransition = true;
5526             break;
5527         case AA_AFSTATE_ACTIVE_SCAN:
5528             nextState = NO_TRANSITION;
5529             break;
5530         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5531             m_IsAfLockRequired = true;
5532             nextState = HAL_AFSTATE_LOCKED;
5533             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5534             break;
5535         case AA_AFSTATE_AF_FAILED_FOCUS:
5536             nextState = HAL_AFSTATE_FAILED;
5537             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5538             break;
5539         default:
5540             bWrongTransition = true;
5541             break;
5542         }
5543     }
5544     else if (m_afState == HAL_AFSTATE_LOCKED) {
5545         switch (noti) {
5546             case AA_AFSTATE_INACTIVE:
5547                 nextState = NO_TRANSITION;
5548                 break;
5549             case AA_AFSTATE_ACTIVE_SCAN:
5550                 bWrongTransition = true;
5551                 break;
5552             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5553                 nextState = NO_TRANSITION;
5554                 break;
5555             case AA_AFSTATE_AF_FAILED_FOCUS:
5556             default:
5557                 bWrongTransition = true;
5558                 break;
5559         }
5560     }
5561     else if (m_afState == HAL_AFSTATE_FAILED) {
5562         switch (noti) {
5563             case AA_AFSTATE_INACTIVE:
5564             case AA_AFSTATE_ACTIVE_SCAN:
5565             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5566                 bWrongTransition = true;
5567                 break;
5568             case AA_AFSTATE_AF_FAILED_FOCUS:
5569                 nextState = NO_TRANSITION;
5570                 break;
5571             default:
5572                 bWrongTransition = true;
5573                 break;
5574         }
5575     }
5576     if (bWrongTransition) {
5577         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5578         return;
5579     }
5580     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5581     if (nextState != NO_TRANSITION)
5582         m_afState = nextState;
5583 }
5584 
5585 void ExynosCameraHWInterface2::OnAfCancel(int id)
5586 {
5587     m_afTriggerId = id;
5588 
5589     switch (m_afMode) {
5590     case AA_AFMODE_AUTO:
5591     case AA_AFMODE_MACRO:
5592     case AA_AFMODE_OFF:
5593     case AA_AFMODE_MANUAL:
5594         OnAfCancelAutoMacro(id);
5595         break;
5596     case AA_AFMODE_CONTINUOUS_VIDEO:
5597         OnAfCancelCAFVideo(id);
5598         break;
5599     case AA_AFMODE_CONTINUOUS_PICTURE:
5600         OnAfCancelCAFPicture(id);
5601         break;
5602     default:
5603         break;
5604     }
5605 }
5606 
5607 void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int /*id*/)
5608 {
5609     int nextState = NO_TRANSITION;
5610 
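    // Cancelling AF also winds down a pending AF-assist flash sequence.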
5611     if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
5612         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5613     }
5614     switch (m_afState) {
5615     case HAL_AFSTATE_INACTIVE:
5616         nextState = NO_TRANSITION;
5617         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5618         break;
5619     case HAL_AFSTATE_NEEDS_COMMAND:
5620     case HAL_AFSTATE_STARTED:
5621     case HAL_AFSTATE_SCANNING:
5622     case HAL_AFSTATE_LOCKED:
5623     case HAL_AFSTATE_FAILED:
5624         SetAfMode(AA_AFMODE_OFF);
5625         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5626         nextState = HAL_AFSTATE_INACTIVE;
5627         break;
5628     default:
5629         break;
5630     }
5631     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5632     if (nextState != NO_TRANSITION)
5633         m_afState = nextState;
5634 }
5635 
5636 void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int /*id*/)
5637 {
5638     int nextState = NO_TRANSITION;
5639 
5640     switch (m_afState) {
5641     case HAL_AFSTATE_INACTIVE:
5642         nextState = NO_TRANSITION;
5643         break;
5644     case HAL_AFSTATE_NEEDS_COMMAND:
5645     case HAL_AFSTATE_STARTED:
5646     case HAL_AFSTATE_SCANNING:
5647     case HAL_AFSTATE_LOCKED:
5648     case HAL_AFSTATE_FAILED:
5649     case HAL_AFSTATE_NEEDS_DETERMINATION:
5650     case HAL_AFSTATE_PASSIVE_FOCUSED:
5651         SetAfMode(AA_AFMODE_OFF);
5652         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5653         SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5654         nextState = HAL_AFSTATE_INACTIVE;
5655         break;
5656     default:
5657         break;
5658     }
5659     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5660     if (nextState != NO_TRANSITION)
5661         m_afState = nextState;
5662 }
5663 
5664 void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int /*id*/)
5665 {
5666     int nextState = NO_TRANSITION;
5667 
5668     switch (m_afState) {
5669     case HAL_AFSTATE_INACTIVE:
5670         nextState = NO_TRANSITION;
5671         break;
5672     case HAL_AFSTATE_NEEDS_COMMAND:
5673     case HAL_AFSTATE_STARTED:
5674     case HAL_AFSTATE_SCANNING:
5675     case HAL_AFSTATE_LOCKED:
5676     case HAL_AFSTATE_FAILED:
5677     case HAL_AFSTATE_NEEDS_DETERMINATION:
5678     case HAL_AFSTATE_PASSIVE_FOCUSED:
5679         SetAfMode(AA_AFMODE_OFF);
5680         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5681         SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5682         nextState = HAL_AFSTATE_INACTIVE;
5683         break;
5684     default:
5685         break;
5686     }
5687     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5688     if (nextState != NO_TRANSITION)
5689         m_afState = nextState;
5690 }
5691 
5692 void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5693 {
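    // Notify the framework only when the AF state changes; a value of 0
    // (AF_STATE_INACTIVE) is always re-sent.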
5694     if (m_serviceAfState != newState || newState == 0)
5695         m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5696     m_serviceAfState = newState;
5697 }
5698 
5699 int ExynosCameraHWInterface2::GetAfStateForService()
5700 {
5701    return m_serviceAfState;
5702 }
5703 
5704 void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5705 {
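    // Apply the new AF mode immediately unless a previous change is still pending
    // (m_IsAfModeUpdateRequired); in that case the request is parked in m_afMode2
    // and the current mode is kept for now.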
5706     if (m_afMode != afMode) {
5707         if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
5708             m_afMode2 = afMode;
5709             ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5710         }
5711         else {
5712             ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5713             m_IsAfModeUpdateRequired = true;
5714             m_afMode = afMode;
5715             SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5716             m_afState = HAL_AFSTATE_INACTIVE;
5717         }
5718     }
5719 }
5720 
5721 void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5722 {
5723     char property[PROPERTY_VALUE_MAX];
5724 
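    // Fill in the EXIF fields that do not change per capture: maker/model/software
    // from system properties, lens constants from the static camera info, and the
    // default resolution/compression tags for the thumbnail IFD.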
5725     //2 0th IFD TIFF Tags
5726     //3 Maker
5727     property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5728     strncpy((char *)mExifInfo.maker, property,
5729                 sizeof(mExifInfo.maker) - 1);
5730     mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5731     //3 Model
5732     property_get("ro.product.model", property, EXIF_DEF_MODEL);
5733     strncpy((char *)mExifInfo.model, property,
5734                 sizeof(mExifInfo.model) - 1);
5735     mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5736     //3 Software
5737     property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5738     strncpy((char *)mExifInfo.software, property,
5739                 sizeof(mExifInfo.software) - 1);
5740     mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5741 
5742     //3 YCbCr Positioning
5743     mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5744 
5745     //2 0th IFD Exif Private Tags
5746     //3 F Number
5747     mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5748     mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5749     //3 Exposure Program
5750     mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5751     //3 Exif Version
5752     memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5753     //3 Aperture
5754     double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5755     mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5756     mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5757     //3 Maximum lens aperture
5758     mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5759     mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5760     //3 Lens Focal Length
5761     mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5762 
5763     mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5764     //3 User Comments
5765     strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5766     //3 Color Space information
5767     mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5768     //3 Exposure Mode
5769     mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5770 
5771     //2 0th IFD GPS Info Tags
5772     unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5773     memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5774 
5775     //2 1st IFD TIFF Tags
5776     mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5777     mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5778     mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5779     mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5780     mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5781     mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5782 }
5783 
5784 void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5785         camera2_shot_ext *currentEntry)
5786 {
5787     camera2_dm *dm = &(currentEntry->shot.dm);
5788     camera2_ctl *ctl = &(currentEntry->shot.ctl);
5789 
5790     ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
5791     if (!ctl->request.frameCount)
5792        return;
5793     //2 0th IFD TIFF Tags
5794     //3 Width
5795     exifInfo->width = rect->w;
5796     //3 Height
5797     exifInfo->height = rect->h;
5798     //3 Orientation
5799     switch (ctl->jpeg.orientation) {
5800     case 90:
5801         exifInfo->orientation = EXIF_ORIENTATION_90;
5802         break;
5803     case 180:
5804         exifInfo->orientation = EXIF_ORIENTATION_180;
5805         break;
5806     case 270:
5807         exifInfo->orientation = EXIF_ORIENTATION_270;
5808         break;
5809     case 0:
5810     default:
5811         exifInfo->orientation = EXIF_ORIENTATION_UP;
5812         break;
5813     }
5814 
5815     //3 Date time
5816     struct timeval rawtime;
5817     struct tm *timeinfo;
5818     gettimeofday(&rawtime, NULL);
5819     timeinfo = localtime(&rawtime.tv_sec);
5820     strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5821     snprintf((char *)exifInfo->sub_sec, sizeof(exifInfo->sub_sec), "%03lu",
5822         (unsigned long)rawtime.tv_usec / 1000UL);
5823 
5824     //2 0th IFD Exif Private Tags
5825     //3 Exposure Time
5826     int shutterSpeed = (dm->sensor.exposureTime/1000);
5827 
5828     // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5829     if (shutterSpeed > 500000)
5830         shutterSpeed -=  100000;
5831 
5832     if (shutterSpeed < 0) {
5833         shutterSpeed = 100;
5834     }
5835 
5836     exifInfo->exposure_time.num = 1;
5837     // x us -> 1/x s
5838     //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5839     exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5840 
5841     //3 ISO Speed Rating
5842     exifInfo->iso_speed_rating = dm->aa.isoValue;
5843 
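    // APEX values: Av from the F-number, Tv from the exposure time, Sv from the ISO
    // rating; Bv = Av + Tv - Sv and Ev = Av + Tv.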
5844     uint32_t av, tv, bv, sv, ev;
5845     av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5846     tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5847     sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5848     bv = av + tv - sv;
5849     ev = av + tv;
5850     //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5851     ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
5852 
5853     //3 Shutter Speed
5854     exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5855     exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5856     //3 Brightness
5857     exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5858     exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5859     //3 Exposure Bias
5860     if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
5861         ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
5862         exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5863         exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5864     } else {
5865         exifInfo->exposure_bias.num = 0;
5866         exifInfo->exposure_bias.den = 0;
5867     }
5868     //3 Metering Mode
5869     /*switch (m_curCameraInfo->metering) {
5870     case METERING_MODE_CENTER:
5871         exifInfo->metering_mode = EXIF_METERING_CENTER;
5872         break;
5873     case METERING_MODE_MATRIX:
5874         exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5875         break;
5876     case METERING_MODE_SPOT:
5877         exifInfo->metering_mode = EXIF_METERING_SPOT;
5878         break;
5879     case METERING_MODE_AVERAGE:
5880     default:
5881         exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5882         break;
5883     }*/
5884     exifInfo->metering_mode = EXIF_METERING_CENTER;
5885 
5886     //3 Flash
5887     if (m_ctlInfo.flash.m_flashDecisionResult)
5888         exifInfo->flash = 1;
5889     else
5890         exifInfo->flash = EXIF_DEF_FLASH;
5891 
5892     //3 White Balance
5893     if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
5894         exifInfo->white_balance = EXIF_WB_AUTO;
5895     else
5896         exifInfo->white_balance = EXIF_WB_MANUAL;
5897 
5898     //3 Scene Capture Type
5899     switch (ctl->aa.sceneMode) {
5900     case AA_SCENE_MODE_PORTRAIT:
5901         exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5902         break;
5903     case AA_SCENE_MODE_LANDSCAPE:
5904         exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5905         break;
5906     case AA_SCENE_MODE_NIGHT_PORTRAIT:
5907         exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5908         break;
5909     default:
5910         exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5911         break;
5912     }
5913 
5914     //2 0th IFD GPS Info Tags
5915     if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5916 
5917         if (ctl->jpeg.gpsCoordinates[0] > 0)
5918             strcpy((char *)exifInfo->gps_latitude_ref, "N");
5919         else
5920             strcpy((char *)exifInfo->gps_latitude_ref, "S");
5921 
5922         if (ctl->jpeg.gpsCoordinates[1] > 0)
5923             strcpy((char *)exifInfo->gps_longitude_ref, "E");
5924         else
5925             strcpy((char *)exifInfo->gps_longitude_ref, "W");
5926 
5927         if (ctl->jpeg.gpsCoordinates[2] > 0)
5928             exifInfo->gps_altitude_ref = 0;
5929         else
5930             exifInfo->gps_altitude_ref = 1;
5931 
5932         double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
5933         double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
5934         double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
5935 
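        // Convert the absolute coordinates into EXIF degrees/minutes/seconds
        // rationals, and the GPS timestamp into UTC time and date stamps.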
5936         exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5937         exifInfo->gps_latitude[0].den = 1;
5938         exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5939         exifInfo->gps_latitude[1].den = 1;
5940         exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
5941                                         - exifInfo->gps_latitude[1].num) * 60);
5942         exifInfo->gps_latitude[2].den = 1;
5943 
5944         exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5945         exifInfo->gps_longitude[0].den = 1;
5946         exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5947         exifInfo->gps_longitude[1].den = 1;
5948         exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
5949                                         - exifInfo->gps_longitude[1].num) * 60);
5950         exifInfo->gps_longitude[2].den = 1;
5951 
5952         exifInfo->gps_altitude.num = (uint32_t)round(altitude);
5953         exifInfo->gps_altitude.den = 1;
5954 
5955         struct tm tm_data;
5956         long timestamp;
5957         timestamp = (long)ctl->jpeg.gpsTimestamp;
5958         gmtime_r(&timestamp, &tm_data);
5959         exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5960         exifInfo->gps_timestamp[0].den = 1;
5961         exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5962         exifInfo->gps_timestamp[1].den = 1;
5963         exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5964         exifInfo->gps_timestamp[2].den = 1;
5965         snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5966                 "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5967 
5968         memset(exifInfo->gps_processing_method, 0, 100);
5969         memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
5970         exifInfo->enableGps = true;
5971     } else {
5972         exifInfo->enableGps = false;
5973     }
5974 
5975     //2 1st IFD TIFF Tags
5976     exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5977     exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5978 }
5979 
5980 ExynosCameraHWInterface2::MainThread::~MainThread()
5981 {
5982     ALOGV("(%s):", __FUNCTION__);
5983 }
5984 
5985 void ExynosCameraHWInterface2::MainThread::release()
5986 {
5987     ALOGV("(%s):", __func__);
5988     SetSignal(SIGNAL_THREAD_RELEASE);
5989 }
5990 
5991 ExynosCameraHWInterface2::SensorThread::~SensorThread()
5992 {
5993     ALOGV("(%s):", __FUNCTION__);
5994 }
5995 
5996 void ExynosCameraHWInterface2::SensorThread::release()
5997 {
5998     ALOGV("(%s):", __func__);
5999     SetSignal(SIGNAL_THREAD_RELEASE);
6000 }
6001 
6002 ExynosCameraHWInterface2::StreamThread::~StreamThread()
6003 {
6004     ALOGV("(%s):", __FUNCTION__);
6005 }
6006 
6007 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
6008 {
6009     ALOGV("DEBUG(%s):", __FUNCTION__);
6010     memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
6011 }
6012 
6013 void ExynosCameraHWInterface2::StreamThread::release()
6014 {
6015     ALOGV("(%s):", __func__);
6016     SetSignal(SIGNAL_THREAD_RELEASE);
6017 }
6018 
6019 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
6020 {
6021     int index;
6022     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6023         if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
6024             return index;
6025     }
6026     return -1;
6027 }
6028 
6029 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
6030 {
6031     int index;
6032     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6033         if (m_parameters.svcBufHandle[index] == *bufHandle)
6034             return index;
6035     }
6036     return -1;
6037 }
6038 
6039 status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
6040 {
6041     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6042     int index, vacantIndex;
6043     bool vacancy = false;
6044 
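    // Find the first free slot while rejecting a stream id that is already attached.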
6045     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6046         if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
6047             vacancy = true;
6048             vacantIndex = index;
6049         } else if (m_attachedSubStreams[index].streamId == stream_id) {
6050             return BAD_VALUE;
6051         }
6052     }
6053     if (!vacancy)
6054         return NO_MEMORY;
6055     m_attachedSubStreams[vacantIndex].streamId = stream_id;
6056     m_attachedSubStreams[vacantIndex].priority = priority;
6057     m_numRegisteredStream++;
6058     return NO_ERROR;
6059 }
6060 
6061 status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
6062 {
6063     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6064     int index;
6065     bool found = false;
6066 
6067     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6068         if (m_attachedSubStreams[index].streamId == stream_id) {
6069             found = true;
6070             break;
6071         }
6072     }
6073     if (!found)
6074         return BAD_VALUE;
6075     m_attachedSubStreams[index].streamId = -1;
6076     m_attachedSubStreams[index].priority = 0;
6077     m_numRegisteredStream--;
6078     return NO_ERROR;
6079 }
6080 
6081 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
6082 {
6083     if (ionClient == 0) {
6084         ionClient = ion_client_create();
6085         if (ionClient < 0) {
6086             ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
6087             return 0;
6088         }
6089     }
6090     return ionClient;
6091 }
6092 
6093 int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
6094 {
6095     if (ionClient != 0) {
6096         if (ionClient > 0) {
6097             ion_client_destroy(ionClient);
6098         }
6099         ionClient = 0;
6100     }
6101     return ionClient;
6102 }
6103 
6104 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
6105 {
6106     return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
6107 }
6108 
6109 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
6110 {
6111     int ret = 0;
6112     int i = 0;
6113     int flag = 0;
6114 
6115     if (ionClient == 0) {
6116         ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
6117         return -1;
6118     }
6119 
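    // Allocate one ION buffer per plane (until a zero-sized plane is reached) and
    // map it into this process; planes whose bit is set in cacheFlag are allocated
    // cached with explicit sync. On failure, everything allocated so far is freed.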
6120     for (i = 0 ; i < iMemoryNum ; i++) {
6121         if (buf->size.extS[i] == 0) {
6122             break;
6123         }
6124         if (1 << i & cacheFlag)
6125             flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
6126         else
6127             flag = 0;
6128         buf->fd.extFd[i] = ion_alloc(ionClient, \
6129                                       buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
6130         if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
6131             ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
6132             buf->fd.extFd[i] = -1;
6133             freeCameraMemory(buf, iMemoryNum);
6134             return -1;
6135         }
6136 
6137         buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
6138                                         buf->size.extS[i], 0);
6139         if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
6140             ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
6141             buf->virt.extP[i] = (char *)MAP_FAILED;
6142             freeCameraMemory(buf, iMemoryNum);
6143             return -1;
6144         }
6145         ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
6146     }
6147 
6148     return ret;
6149 }
6150 
6151 void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6152 {
6153 
6154     int i = 0 ;
6155     int ret = 0;
6156 
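    // Unmap and free every plane that was actually allocated, then reset the fd,
    // mapping and size so the buffer can be reused safely.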
6157     for (i=0;i<iMemoryNum;i++) {
6158         if (buf->fd.extFd[i] != -1) {
6159             if (buf->virt.extP[i] != (char *)MAP_FAILED) {
6160                 ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
6161                 if (ret < 0)
6162                     ALOGE("ERR(%s)", __FUNCTION__);
6163             }
6164             ion_free(buf->fd.extFd[i]);
6165         ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
6166         }
6167         buf->fd.extFd[i] = -1;
6168         buf->virt.extP[i] = (char *)MAP_FAILED;
6169         buf->size.extS[i] = 0;
6170     }
6171 }
6172 
6173 void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6174 {
6175     int i =0 ;
6176     for (i=0;i<iMemoryNum;i++) {
6177         buf->virt.extP[i] = (char *)MAP_FAILED;
6178         buf->fd.extFd[i] = -1;
6179         buf->size.extS[i] = 0;
6180     }
6181 }
6182 
6183 
6184 
6185 
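// The static HAL2_device_* functions below are the C entry points for the camera2
// device ops; they unwrap camera2_device_t::priv back into the
// ExynosCameraHWInterface2 instance (see obj()) and forward each call.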
6186 static camera2_device_t *g_cam2_device = NULL;
6187 static bool g_camera_vaild = false;
6188 static Mutex g_camera_mutex;
6189 ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6190 
6191 static int HAL2_camera_device_close(struct hw_device_t* device)
6192 {
6193     Mutex::Autolock lock(g_camera_mutex);
6194     ALOGD("(%s): ENTER", __FUNCTION__);
6195     if (device) {
6196 
6197         camera2_device_t *cam_device = (camera2_device_t *)device;
6198         ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6199         ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6200         delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6201         free(cam_device);
6202         g_camera_vaild = false;
6203         g_cam2_device = NULL;
6204     }
6205 
6206     ALOGD("(%s): EXIT", __FUNCTION__);
6207     return 0;
6208 }
6209 
6210 static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6211 {
6212     return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6213 }
6214 
6215 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6216             const camera2_request_queue_src_ops_t *request_src_ops)
6217 {
6218     ALOGV("DEBUG(%s):", __FUNCTION__);
6219     return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6220 }
6221 
6222 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6223 {
6224     ALOGV("DEBUG(%s):", __FUNCTION__);
6225     return obj(dev)->notifyRequestQueueNotEmpty();
6226 }
6227 
6228 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6229             const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6230 {
6231     ALOGV("DEBUG(%s):", __FUNCTION__);
6232     return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6233 }
6234 
6235 static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6236 {
6237     ALOGV("DEBUG(%s):", __FUNCTION__);
6238     return obj(dev)->getInProgressCount();
6239 }
6240 
6241 static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6242 {
6243     ALOGV("DEBUG(%s):", __FUNCTION__);
6244     return obj(dev)->flushCapturesInProgress();
6245 }
6246 
6247 static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6248             int request_template, camera_metadata_t **request)
6249 {
6250     ALOGV("DEBUG(%s):", __FUNCTION__);
6251     return obj(dev)->constructDefaultRequest(request_template, request);
6252 }
6253 
6254 static int HAL2_device_allocate_stream(
6255             const struct camera2_device *dev,
6256             // inputs
6257             uint32_t width,
6258             uint32_t height,
6259             int      format,
6260             const camera2_stream_ops_t *stream_ops,
6261             // outputs
6262             uint32_t *stream_id,
6263             uint32_t *format_actual,
6264             uint32_t *usage,
6265             uint32_t *max_buffers)
6266 {
6267     ALOGV("(%s): ", __FUNCTION__);
6268     return obj(dev)->allocateStream(width, height, format, stream_ops,
6269                                     stream_id, format_actual, usage, max_buffers);
6270 }
6271 
HAL2_device_register_stream_buffers(const struct camera2_device * dev,uint32_t stream_id,int num_buffers,buffer_handle_t * buffers)6272 static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6273             uint32_t stream_id,
6274             int num_buffers,
6275             buffer_handle_t *buffers)
6276 {
6277     ALOGV("DEBUG(%s):", __FUNCTION__);
6278     return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6279 }
6280 
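/*
 * release_stream and trigger_action are guarded by g_camera_vaild so that
 * calls arriving after the device has been closed become no-ops instead of
 * dereferencing a stale priv pointer.
 */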
static int HAL2_device_release_stream(
            const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}

static int HAL2_device_allocate_reprocess_stream(
            const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                    stream_id, consumer_usage, max_buffers);
}

static int HAL2_device_allocate_reprocess_stream_from_stream(
            const struct camera2_device *dev,
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
                                    reprocess_stream_ops, stream_id);
}

static int HAL2_device_release_reprocess_stream(
            const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}

static int HAL2_device_trigger_action(const struct camera2_device *dev,
            uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}

static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}

static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device *dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}

static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}

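/*
 * Module-level entry points (not tied to an open device): camera count and
 * per-camera static information.
 */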
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}

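/*
 * get_camera_info: camera 0 is the back sensor, camera 1 the front sensor.
 * The ExynosCamera2 helper for the requested id is created lazily, and the
 * static metadata it builds is cached in mCameraInfo so it is constructed
 * only once per camera.
 */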
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        // First pass allocates the static metadata, second pass fills it in.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}

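/*
 * Bind every camera2_device_ops callback to its HAL2_device_<name> wrapper.
 * SET_METHOD relies on the GCC "label : value" designated-initializer
 * extension that this file also uses for the hw_module_t definitions below.
 */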
#define SET_METHOD(m) m : HAL2_device_##m

static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
        get_instance_metadata : NULL
};

#undef SET_METHOD

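/*
 * Framework-facing open() hook.  Only one camera may be open at a time: if
 * another instance is still marked valid the call fails with -EUSERS.
 * Otherwise a camera2_device_t is allocated, its ops table and close hook
 * are wired up, and an ExynosCameraHWInterface2 is created as priv.
 */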
static int HAL2_camera_device_open(const struct hw_module_t* module,
                                  const char *id,
                                  struct hw_device_t** device)
{
    int cameraId = atoi(id);
    int openInvalid = 0;

    Mutex::Autolock lock(g_camera_mutex);
    if (g_camera_vaild) {
        ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
        return -EUSERS;
    }
    g_camera_vaild = false;
    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s): Invalid camera ID %s", __FUNCTION__, id);
        return -EINVAL;
    }

    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            ALOGD("DEBUG(%s): returning existing camera ID %s", __FUNCTION__, id);
            goto done;
        } else {
            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
            while (g_cam2_device)
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
        }
    }

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);

    if (!g_cam2_device)
        return -ENOMEM;

    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close   = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s): open camera2 %s", __FUNCTION__, id);

    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
    if (!openInvalid) {
        ALOGE("ERR(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
        // Clean up the partially opened device instead of leaking it and
        // leaving a stale g_cam2_device behind.
        delete static_cast<ExynosCameraHWInterface2 *>(g_cam2_device->priv);
        free(g_cam2_device);
        g_cam2_device = NULL;
        return -ENODEV;
    }
done:
    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s): opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;

    return 0;
}

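/*
 * HAL module boilerplate.  libhardware locates this shared library by the
 * HAL_MODULE_INFO_SYM symbol; camera devices are then opened through
 * camera_module_methods.open, i.e. HAL2_camera_device_open above.
 */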
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};

extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso                : NULL,
          reserved           : {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo,
      set_callbacks         : NULL,
      get_vendor_tag_ops    : NULL,
      open_legacy           : NULL,
      reserved              : {0}
    };
}
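
/*
 * Illustrative only (not compiled as part of this HAL): a minimal sketch of
 * how a framework-side client would typically locate this module through
 * libhardware and open camera "0".  Error handling is omitted and the local
 * variable names are placeholders.
 *
 *   const hw_module_t *mod = NULL;
 *   hw_device_t *dev = NULL;
 *   if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &mod) == 0 &&
 *       mod->methods->open(mod, "0", &dev) == 0) {
 *       camera2_device_t *cam = reinterpret_cast<camera2_device_t *>(dev);
 *       // ... use cam->ops ..., then:
 *       cam->common.close(&cam->common);
 *   }
 */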

}; // namespace android