1 /*
2  * Copyright (C) 2013-2018 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera3-OutputStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 
21 #include <utils/Log.h>
22 #include <utils/Trace.h>
23 #include "Camera3OutputStream.h"
24 #include "utils/TraceHFR.h"
25 
26 #ifndef container_of
27 #define container_of(ptr, type, member) \
28     (type *)((char*)(ptr) - offsetof(type, member))
29 #endif
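// For illustration, this macro recovers a struct from a pointer to one of its
// members; returnBufferCheckedLocked() below uses it to get the owning
// ANativeWindowBuffer back from the buffer_handle_t* handed out to the HAL:
//     ANativeWindowBuffer *anwBuffer =
//             container_of(buffer.buffer, ANativeWindowBuffer, handle);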
30 
31 namespace android {
32 
33 namespace camera3 {
34 
35 Camera3OutputStream::Camera3OutputStream(int id,
36         sp<Surface> consumer,
37         uint32_t width, uint32_t height, int format,
38         android_dataspace dataSpace, camera3_stream_rotation_t rotation,
39         nsecs_t timestampOffset, const String8& physicalCameraId,
40         int setId) :
41         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
42                             /*maxSize*/0, format, dataSpace, rotation,
43                             physicalCameraId, setId),
44         mConsumer(consumer),
45         mTransform(0),
46         mTraceFirstBuffer(true),
47         mUseBufferManager(false),
48         mTimestampOffset(timestampOffset),
49         mConsumerUsage(0),
50         mDropBuffers(false),
51         mDequeueBufferLatency(kDequeueLatencyBinSize) {
52 
53     if (mConsumer == NULL) {
54         ALOGE("%s: Consumer is NULL!", __FUNCTION__);
55         mState = STATE_ERROR;
56     }
57 
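    // Release notifications are only consumed when this stream is part of a stream
    // set (setId > CAMERA3_STREAM_SET_ID_INVALID): in that case onBufferReleased()
    // forwards buffer-release events to Camera3BufferManager (see
    // BufferProducerListener::onBufferReleased() below).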
58     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
59     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
60 }
61 
62 Camera3OutputStream::Camera3OutputStream(int id,
63         sp<Surface> consumer,
64         uint32_t width, uint32_t height, size_t maxSize, int format,
65         android_dataspace dataSpace, camera3_stream_rotation_t rotation,
66         nsecs_t timestampOffset, const String8& physicalCameraId, int setId) :
67         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize,
68                             format, dataSpace, rotation, physicalCameraId, setId),
69         mConsumer(consumer),
70         mTransform(0),
71         mTraceFirstBuffer(true),
72         mUseMonoTimestamp(false),
73         mUseBufferManager(false),
74         mTimestampOffset(timestampOffset),
75         mConsumerUsage(0),
76         mDropBuffers(false),
77         mDequeueBufferLatency(kDequeueLatencyBinSize) {
78 
79     if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
80         ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
81                 format);
82         mState = STATE_ERROR;
83     }
84 
85     if (mConsumer == NULL) {
86         ALOGE("%s: Consumer is NULL!", __FUNCTION__);
87         mState = STATE_ERROR;
88     }
89 
90     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
91     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
92 }
93 
94 Camera3OutputStream::Camera3OutputStream(int id,
95         uint32_t width, uint32_t height, int format,
96         uint64_t consumerUsage, android_dataspace dataSpace,
97         camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
98         const String8& physicalCameraId, int setId) :
99         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
100                             /*maxSize*/0, format, dataSpace, rotation,
101                             physicalCameraId, setId),
102         mConsumer(nullptr),
103         mTransform(0),
104         mTraceFirstBuffer(true),
105         mUseBufferManager(false),
106         mTimestampOffset(timestampOffset),
107         mConsumerUsage(consumerUsage),
108         mDropBuffers(false),
109         mDequeueBufferLatency(kDequeueLatencyBinSize) {
110     // A deferred consumer only supports the preview surface format for now.
111     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
112         ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
113                 __FUNCTION__);
114         mState = STATE_ERROR;
115     }
116 
117     // Sanity check for the consumer usage flag.
118     if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
119             (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
120         ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
121               __FUNCTION__, consumerUsage);
122         mState = STATE_ERROR;
123     }
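    // As an illustrative example (not enforced here beyond the check above): a
    // preview SurfaceTexture target typically carries USAGE_HW_TEXTURE and a
    // SurfaceView/composer target USAGE_HW_COMPOSER, so either would pass, while a
    // CPU-only usage such as USAGE_SW_READ_OFTEN alone would be rejected.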
124 
125     mConsumerName = String8("Deferred");
126     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
127     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
128 }
129 
130 Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type,
131                                          uint32_t width, uint32_t height,
132                                          int format,
133                                          android_dataspace dataSpace,
134                                          camera3_stream_rotation_t rotation,
135                                          const String8& physicalCameraId,
136                                          uint64_t consumerUsage, nsecs_t timestampOffset,
137                                          int setId) :
138         Camera3IOStreamBase(id, type, width, height,
139                             /*maxSize*/0,
140                             format, dataSpace, rotation,
141                             physicalCameraId, setId),
142         mTransform(0),
143         mTraceFirstBuffer(true),
144         mUseMonoTimestamp(false),
145         mUseBufferManager(false),
146         mTimestampOffset(timestampOffset),
147         mConsumerUsage(consumerUsage),
148         mDropBuffers(false),
149         mDequeueBufferLatency(kDequeueLatencyBinSize) {
150 
151     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
152     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
153 
154     // Subclasses are expected to initialize mConsumer themselves
155 }
156 
157 
158 Camera3OutputStream::~Camera3OutputStream() {
159     disconnectLocked();
160 }
161 
162 status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer,
163         const std::vector<size_t>&) {
164     ATRACE_HFR_CALL();
165 
166     ANativeWindowBuffer* anb;
167     int fenceFd = -1;
168 
169     status_t res;
170     res = getBufferLockedCommon(&anb, &fenceFd);
171     if (res != OK) {
172         return res;
173     }
174 
175     /**
176      * The fence FD is now owned by the HAL, except in case of error,
177      * in which case we reassign it to acquire_fence
178      */
179     handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
180                         /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);
181 
182     return OK;
183 }
184 
185 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
186             ANativeWindowBuffer* buffer, int anwReleaseFence,
187             const std::vector<size_t>&) {
188     return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
189 }
190 
191 status_t Camera3OutputStream::returnBufferLocked(
192         const camera3_stream_buffer &buffer,
193         nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
194     ATRACE_HFR_CALL();
195 
196     status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);
197 
198     if (res != OK) {
199         return res;
200     }
201 
202     mLastTimestamp = timestamp;
203     mFrameCount++;
204 
205     return OK;
206 }
207 
208 status_t Camera3OutputStream::returnBufferCheckedLocked(
209             const camera3_stream_buffer &buffer,
210             nsecs_t timestamp,
211             bool output,
212             const std::vector<size_t>& surface_ids,
213             /*out*/
214             sp<Fence> *releaseFenceOut) {
215 
216     (void)output;
217     ALOG_ASSERT(output, "Expected output to be true");
218 
219     status_t res;
220 
221     // Fence management - always honor release fence from HAL
222     sp<Fence> releaseFence = new Fence(buffer.release_fence);
223     int anwReleaseFence = releaseFence->dup();
224 
225     /**
226      * Release the lock briefly to avoid deadlock with
227      * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
228      * thread will go into StreamingProcessor::onFrameAvailable) during
229      * queueBuffer
230      */
231     sp<ANativeWindow> currentConsumer = mConsumer;
232     StreamState state = mState;
233     mLock.unlock();
234 
235     ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
236     /**
237      * Return buffer back to ANativeWindow
238      */
239     if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
240         // Cancel buffer
241         if (mDropBuffers) {
242             ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
243         } else if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
244             ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
245         } else {
246             ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
247         }
248 
249         res = currentConsumer->cancelBuffer(currentConsumer.get(),
250                 anwBuffer,
251                 anwReleaseFence);
252         if (shouldLogError(res, state)) {
253             ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
254                   " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
255         }
256 
257         notifyBufferReleased(anwBuffer);
258         if (mUseBufferManager) {
259             // Return this buffer back to buffer manager.
260             mBufferProducerListener->onBufferReleased();
261         }
262     } else {
263         if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
264             {
265                 char traceLog[48];
266                 snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
267                 ATRACE_NAME(traceLog);
268             }
269             mTraceFirstBuffer = false;
270         }
271 
272         /* Certain consumers (such as AudioSource or HardwareComposer) use
273          * MONOTONIC time, causing time misalignment if the camera timestamp is
274          * in BOOTTIME. Do the conversion if necessary. */
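        /* Sketch of the assumed relationship (not verified here): mTimestampOffset
         * is expected to be the boottime-to-monotonic delta captured by the caller,
         * i.e. roughly clock_gettime(CLOCK_BOOTTIME) - clock_gettime(CLOCK_MONOTONIC),
         * so "timestamp - mTimestampOffset" maps a BOOTTIME timestamp onto the
         * MONOTONIC timeline expected by these consumers. */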
275         res = native_window_set_buffers_timestamp(mConsumer.get(),
276                 mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
277         if (res != OK) {
278             ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
279                   __FUNCTION__, mId, strerror(-res), res);
280             return res;
281         }
282 
283         res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
284         if (shouldLogError(res, state)) {
285             ALOGE("%s: Stream %d: Error queueing buffer to native window:"
286                   " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
287         }
288     }
289     mLock.lock();
290 
291     // Once a valid buffer has been returned to the queue, we can no longer
292     // dequeue all buffers for preallocation.
293     if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
294         mStreamUnpreparable = true;
295     }
296 
297     if (res != OK) {
298         close(anwReleaseFence);
299     }
300 
301     *releaseFenceOut = releaseFence;
302 
303     return res;
304 }
305 
306 void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
307     (void) args;
308     String8 lines;
309     lines.appendFormat("    Stream[%d]: Output\n", mId);
310     lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
311     write(fd, lines.string(), lines.size());
312 
313     Camera3IOStreamBase::dump(fd, args);
314 
315     mDequeueBufferLatency.dump(fd,
316         "      DequeueBuffer latency histogram:");
317 }
318 
319 status_t Camera3OutputStream::setTransform(int transform) {
320     ATRACE_CALL();
321     Mutex::Autolock l(mLock);
322     return setTransformLocked(transform);
323 }
324 
325 status_t Camera3OutputStream::setTransformLocked(int transform) {
326     status_t res = OK;
327     if (mState == STATE_ERROR) {
328         ALOGE("%s: Stream in error state", __FUNCTION__);
329         return INVALID_OPERATION;
330     }
331 
332     mTransform = transform;
333     if (mState == STATE_CONFIGURED) {
334         res = native_window_set_buffers_transform(mConsumer.get(),
335                 transform);
336         if (res != OK) {
337             ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
338                     __FUNCTION__, transform, strerror(-res), res);
339         }
340     }
341     return res;
342 }
343 
344 status_t Camera3OutputStream::configureQueueLocked() {
345     status_t res;
346 
347     mTraceFirstBuffer = true;
348     if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
349         return res;
350     }
351 
352     if ((res = configureConsumerQueueLocked()) != OK) {
353         return res;
354     }
355 
356     // Set the dequeueBuffer/attachBuffer timeout if the consumer is not a hw composer or hw texture.
357     // We need to skip those cases, as the timeout would disable non-blocking (async) mode.
358     if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
359         if (mUseBufferManager) {
360             // When the buffer manager is handling the buffers, there should be buffers available
361             // in the buffer queue before we call into dequeueBuffer, because the buffer manager
362             // tracks free buffers.
363             // There are, however, consumer-side features (e.g. ImageReader::discardFreeBuffers)
364             // that can discard free buffers without notifying the buffer manager. We want the
365             // timeout to happen immediately here so the buffer manager can update its internal
366             // state and try to allocate a buffer instead of waiting.
367             mConsumer->setDequeueTimeout(0);
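            // With a 0 timeout, dequeueBuffer returns TIMED_OUT right away instead
            // of blocking when no free buffer is attached; getBufferLockedCommon()
            // reacts by asking the buffer manager for a buffer and attaching it
            // (see the TIMED_OUT handling there).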
368         } else {
369             mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
370         }
371     }
372 
373     return OK;
374 }
375 
376 status_t Camera3OutputStream::configureConsumerQueueLocked() {
377     status_t res;
378 
379     mTraceFirstBuffer = true;
380 
381     ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");
382 
383     // Configure consumer-side ANativeWindow interface. The listener may be used
384     // to notify buffer manager (if it is used) of the returned buffers.
385     res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
386             /*reportBufferRemoval*/true,
387             /*listener*/mBufferProducerListener);
388     if (res != OK) {
389         ALOGE("%s: Unable to connect to native window for stream %d",
390                 __FUNCTION__, mId);
391         return res;
392     }
393 
394     mConsumerName = mConsumer->getConsumerName();
395 
396     res = native_window_set_usage(mConsumer.get(), mUsage);
397     if (res != OK) {
398         ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
399                 __FUNCTION__, mUsage, mId);
400         return res;
401     }
402 
403     res = native_window_set_scaling_mode(mConsumer.get(),
404             NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
405     if (res != OK) {
406         ALOGE("%s: Unable to configure stream scaling: %s (%d)",
407                 __FUNCTION__, strerror(-res), res);
408         return res;
409     }
410 
411     if (mMaxSize == 0) {
412         // For buffers of known size
413         res = native_window_set_buffers_dimensions(mConsumer.get(),
414                 camera3_stream::width, camera3_stream::height);
415     } else {
416         // For buffers with bounded size
417         res = native_window_set_buffers_dimensions(mConsumer.get(),
418                 mMaxSize, 1);
419     }
420     if (res != OK) {
421         ALOGE("%s: Unable to configure stream buffer dimensions"
422                 " %d x %d (maxSize %zu) for stream %d",
423                 __FUNCTION__, camera3_stream::width, camera3_stream::height,
424                 mMaxSize, mId);
425         return res;
426     }
427     res = native_window_set_buffers_format(mConsumer.get(),
428             camera3_stream::format);
429     if (res != OK) {
430         ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
431                 __FUNCTION__, camera3_stream::format, mId);
432         return res;
433     }
434 
435     res = native_window_set_buffers_data_space(mConsumer.get(),
436             camera3_stream::data_space);
437     if (res != OK) {
438         ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
439                 __FUNCTION__, camera3_stream::data_space, mId);
440         return res;
441     }
442 
443     int maxConsumerBuffers;
444     res = static_cast<ANativeWindow*>(mConsumer.get())->query(
445             mConsumer.get(),
446             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
447     if (res != OK) {
448         ALOGE("%s: Unable to query consumer undequeued"
449                 " buffer count for stream %d", __FUNCTION__, mId);
450         return res;
451     }
452 
453     ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
454             maxConsumerBuffers, camera3_stream::max_buffers);
455     if (camera3_stream::max_buffers == 0) {
456         ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
457                 __FUNCTION__, camera3_stream::max_buffers);
458         return INVALID_OPERATION;
459     }
460 
461     mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
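    // For example: a consumer reporting 2 min-undequeued buffers and a HAL
    // requesting max_buffers = 4 results in a queue sized for 6 buffers in total.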
462     mHandoutTotalBufferCount = 0;
463     mFrameCount = 0;
464     mLastTimestamp = 0;
465     mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
466 
467     res = native_window_set_buffer_count(mConsumer.get(),
468             mTotalBufferCount);
469     if (res != OK) {
470         ALOGE("%s: Unable to set buffer count for stream %d",
471                 __FUNCTION__, mId);
472         return res;
473     }
474 
475     res = native_window_set_buffers_transform(mConsumer.get(),
476             mTransform);
477     if (res != OK) {
478         ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
479                 __FUNCTION__, mTransform, strerror(-res), res);
480         return res;
481     }
482 
483     /**
484      * The Camera3 buffer manager is only supported by HAL3.3 onwards, as older HALs require
485      * buffers to be statically allocated for internal static buffer registration, while the
486      * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
487      * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
488      * manager setup is skipped in the code below otherwise. Note that HAL3.2 is also excluded
489      * here, as some HAL3.2 devices may not support dynamic buffer registration.
490      * Also, Camera3BufferManager does not support display/texture streams, as they have their
491      * own buffer management logic.
492      */
493     if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
494             !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
495         uint64_t consumerUsage = 0;
496         getEndpointUsage(&consumerUsage);
497         StreamInfo streamInfo(
498                 getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
499                 mUsage | consumerUsage, mTotalBufferCount,
500                 /*isConfigured*/true);
501         wp<Camera3OutputStream> weakThis(this);
502         res = mBufferManager->registerStream(weakThis,
503                 streamInfo);
504         if (res == OK) {
505             // Disable buffer allocation for this BufferQueue; the buffer manager will take over
506             // the buffer allocation responsibility.
507             mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
508             mUseBufferManager = true;
509         } else {
510             ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
511                   "(error %d %s), fall back to BufferQueue for buffer management!",
512                   __FUNCTION__, mId, res, strerror(-res));
513         }
514     }
515 
516     return OK;
517 }
518 
519 status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
520     ATRACE_HFR_CALL();
521     status_t res;
522 
523     if ((res = getBufferPreconditionCheckLocked()) != OK) {
524         return res;
525     }
526 
527     bool gotBufferFromManager = false;
528 
529     if (mUseBufferManager) {
530         sp<GraphicBuffer> gb;
531         res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, fenceFd);
532         if (res == OK) {
533             // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
534             // successful return.
535             *anb = gb.get();
536             res = mConsumer->attachBuffer(*anb);
537             if (shouldLogError(res, mState)) {
538                 ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
539                         __FUNCTION__, mId, strerror(-res), res);
540             }
541             if (res != OK) {
542                 checkRetAndSetAbandonedLocked(res);
543                 return res;
544             }
545             gotBufferFromManager = true;
546             ALOGV("Stream %d: Attached new buffer", getId());
547         } else if (res == ALREADY_EXISTS) {
548             // Have sufficient free buffers already attached, can just
549             // dequeue from buffer queue
550             ALOGV("Stream %d: Reusing attached buffer", getId());
551             gotBufferFromManager = false;
552         } else if (res != OK) {
553             ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
554                     __FUNCTION__, mId, strerror(-res), res);
555             return res;
556         }
557     }
558     if (!gotBufferFromManager) {
559         /**
560          * Release the lock briefly to avoid deadlock in the following scenario:
561          * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
562          * This thread holds the StreamingProcessor lock and tries to take the Camera3Stream lock.
563          * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
564          * This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to take
565          * the StreamingProcessor lock.
566          * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
567          * and tries to take the bufferQueue lock.
568          * That is a circular locking dependency.
569          */
570         sp<ANativeWindow> currentConsumer = mConsumer;
571         mLock.unlock();
572 
573         nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
574         res = currentConsumer->dequeueBuffer(currentConsumer.get(), anb, fenceFd);
575         nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
576         mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
577 
578         mLock.lock();
579 
580         if (mUseBufferManager && res == TIMED_OUT) {
581             checkRemovedBuffersLocked();
582 
583             sp<GraphicBuffer> gb;
584             res = mBufferManager->getBufferForStream(
585                     getId(), getStreamSetId(), &gb, fenceFd, /*noFreeBuffer*/true);
586 
587             if (res == OK) {
588                 // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
589                 // a successful return.
590                 *anb = gb.get();
591                 res = mConsumer->attachBuffer(*anb);
592                 gotBufferFromManager = true;
593                 ALOGV("Stream %d: Attached new buffer", getId());
594 
595                 if (res != OK) {
596                     if (shouldLogError(res, mState)) {
597                         ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
598                                 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
599                     }
600                     checkRetAndSetAbandonedLocked(res);
601                     return res;
602                 }
603             } else {
604                 ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
605                         " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
606                 return res;
607             }
608         } else if (res != OK) {
609             if (shouldLogError(res, mState)) {
610                 ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
611                         __FUNCTION__, mId, strerror(-res), res);
612             }
613             checkRetAndSetAbandonedLocked(res);
614             return res;
615         }
616     }
617 
618     if (res == OK) {
619         checkRemovedBuffersLocked();
620     }
621 
622     return res;
623 }
624 
625 void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
626     std::vector<sp<GraphicBuffer>> removedBuffers;
627     status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
628     if (res == OK) {
629         onBuffersRemovedLocked(removedBuffers);
630 
631         if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
632             mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), removedBuffers.size());
633         }
634     }
635 }
636 
637 void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
638     // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
639     // STATE_PREPARING, let prepareNextBuffer handle the error.)
640     if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
641         mState = STATE_ABANDONED;
642     }
643 }
644 
645 bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
646     if (res == OK) {
647         return false;
648     }
649     if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
650         return false;
651     }
652     return true;
653 }
654 
655 status_t Camera3OutputStream::disconnectLocked() {
656     status_t res;
657 
658     if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
659         return res;
660     }
661 
662     // Stream configuration was not finished (the stream can only be in STATE_IN_CONFIG or
663     // STATE_CONSTRUCTED state), so there is no need to change the stream state; return OK.
664     if (mConsumer == nullptr) {
665         return OK;
666     }
667 
668     ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
669 
670     res = native_window_api_disconnect(mConsumer.get(),
671                                        NATIVE_WINDOW_API_CAMERA);
672     /**
673      * This is not an error. If the client's calling process dies, the window will
674      * also die and all calls to it will return DEAD_OBJECT; thus it's already
675      * "disconnected".
676      */
677     if (res == DEAD_OBJECT) {
678         ALOGW("%s: While disconnecting stream %d from native window, the"
679                 " native window died from under us", __FUNCTION__, mId);
680     }
681     else if (res != OK) {
682         ALOGE("%s: Unable to disconnect stream %d from native window "
683               "(error %d %s)",
684               __FUNCTION__, mId, res, strerror(-res));
685         mState = STATE_ERROR;
686         return res;
687     }
688 
689     // Since the device is already idle, there are no getBuffer calls into the buffer manager,
690     // so unregistering the stream at this point should be safe.
691     if (mUseBufferManager) {
692         res = mBufferManager->unregisterStream(getId(), getStreamSetId());
693         if (res != OK) {
694             ALOGE("%s: Unable to unregister stream %d from buffer manager "
695                     "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
696             mState = STATE_ERROR;
697             return res;
698         }
699         // Note that, to make the prepare/teardown case work, we must not call
700         // mBufferManager.clear(), as the stream is still in a usable state after this call.
701         mUseBufferManager = false;
702     }
703 
704     mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
705                                            : STATE_CONSTRUCTED;
706 
707     mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
708     mDequeueBufferLatency.reset();
709     return OK;
710 }
711 
712 status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
713 
714     status_t res;
715 
716     if (mConsumer == nullptr) {
717         // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
718         *usage = mConsumerUsage;
719         return OK;
720     }
721 
722     res = getEndpointUsageForSurface(usage, mConsumer);
723 
724     return res;
725 }
726 
727 void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
728     if (consumerUsage == nullptr) {
729         return;
730     }
731 
732     // If an opaque output stream's endpoint is ImageReader, add
733     // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
734     // for the ZSL use case.
735     // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
736     //     1. GRALLOC_USAGE_HW_TEXTURE
737     //     2. GRALLOC_USAGE_HW_RENDER
738     //     3. GRALLOC_USAGE_HW_COMPOSER
739     //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
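    // For example (illustrative, not exhaustive): an IMPLEMENTATION_DEFINED stream
    // whose endpoint only reports CPU-read usage, as an ImageReader typically does,
    // has none of the four bits above and gets GRALLOC_USAGE_HW_CAMERA_ZSL added,
    // while a preview surface reporting GRALLOC_USAGE_HW_TEXTURE is left unchanged.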
740     if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
741             (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
742             GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
743         *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
744     }
745 }
746 
747 status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
748         const sp<Surface>& surface) const {
749     status_t res;
750     uint64_t u = 0;
751 
752     res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
753     applyZSLUsageQuirk(camera3_stream::format, &u);
754     *usage = u;
755     return res;
756 }
757 
758 bool Camera3OutputStream::isVideoStream() const {
759     uint64_t usage = 0;
760     status_t res = getEndpointUsage(&usage);
761     if (res != OK) {
762         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
763         return false;
764     }
765 
766     return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
767 }
768 
769 status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
770     Mutex::Autolock l(mLock);
771     if (mState != STATE_CONSTRUCTED) {
772         ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
773                 __FUNCTION__);
774         return INVALID_OPERATION;
775     }
776     mBufferManager = bufferManager;
777 
778     return OK;
779 }
780 
781 status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
782             const std::vector<OutputStreamInfo> &/*outputInfo*/,
783             const std::vector<size_t> &/*removedSurfaceIds*/,
784             KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
785     ALOGE("%s: this method is not supported!", __FUNCTION__);
786     return INVALID_OPERATION;
787 }
788 
789 void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
790     sp<Camera3OutputStream> stream = mParent.promote();
791     if (stream == nullptr) {
792         ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
793         return;
794     }
795 
796     Mutex::Autolock l(stream->mLock);
797     if (!(stream->mUseBufferManager)) {
798         return;
799     }
800 
801     ALOGV("Stream %d: Buffer released", stream->getId());
802     bool shouldFreeBuffer = false;
803     status_t res = stream->mBufferManager->onBufferReleased(
804         stream->getId(), stream->getStreamSetId(), &shouldFreeBuffer);
805     if (res != OK) {
806         ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
807                 strerror(-res), res);
808         stream->mState = STATE_ERROR;
809     }
810 
811     if (shouldFreeBuffer) {
812         sp<GraphicBuffer> buffer;
813         // Detach and free a buffer (when buffer goes out of scope)
814         stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
815         if (buffer.get() != nullptr) {
816             stream->mBufferManager->notifyBufferRemoved(
817                     stream->getId(), stream->getStreamSetId());
818         }
819     }
820 }
821 
822 void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
823         const std::vector<sp<GraphicBuffer>>& buffers) {
824     sp<Camera3OutputStream> stream = mParent.promote();
825     if (stream == nullptr) {
826         ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
827         return;
828     }
829 
830     if (buffers.size() > 0) {
831         Mutex::Autolock l(stream->mLock);
832         stream->onBuffersRemovedLocked(buffers);
833         if (stream->mUseBufferManager) {
834             stream->mBufferManager->onBuffersRemoved(stream->getId(),
835                     stream->getStreamSetId(), buffers.size());
836         }
837         ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
838     }
839 }
840 
841 void Camera3OutputStream::onBuffersRemovedLocked(
842         const std::vector<sp<GraphicBuffer>>& removedBuffers) {
843     sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
844     if (callback != nullptr) {
845         for (const auto& gb : removedBuffers) {
846             callback->onBufferFreed(mId, gb->handle);
847         }
848     }
849 }
850 
851 status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
852     Mutex::Autolock l(mLock);
853     return detachBufferLocked(buffer, fenceFd);
854 }
855 
856 status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
857     ALOGV("Stream %d: detachBuffer", getId());
858     if (buffer == nullptr) {
859         return BAD_VALUE;
860     }
861 
862     sp<Fence> fence;
863     status_t res = mConsumer->detachNextBuffer(buffer, &fence);
864     if (res == NO_MEMORY) {
865         // This may rarely happen, and indicates that the released buffer was freed by another
866         // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify
867         // the buffer manager that this buffer has been freed. It's not fatal, but it should be
868         // avoided, therefore log a warning.
869         *buffer = 0;
870         ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
871     } else if (res != OK) {
872         // Treat other errors as abandonment
873         if (shouldLogError(res, mState)) {
874             ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
875         }
876         mState = STATE_ABANDONED;
877         return res;
878     }
879 
880     if (fenceFd != nullptr) {
881         if (fence != 0 && fence->isValid()) {
882             *fenceFd = fence->dup();
883         } else {
884             *fenceFd = -1;
885         }
886     }
887 
888     // Here we assume detachBuffer is called by buffer manager so it doesn't need to be notified
889     checkRemovedBuffersLocked(/*notifyBufferManager*/false);
890     return res;
891 }
892 
893 status_t Camera3OutputStream::dropBuffers(bool dropping) {
894     Mutex::Autolock l(mLock);
895     mDropBuffers = dropping;
896     return OK;
897 }
898 
899 const String8& Camera3OutputStream::getPhysicalCameraId() const {
900     Mutex::Autolock l(mLock);
901     return physicalCameraId();
902 }
903 
904 status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
905     return OK;
906 }
907 
908 bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
909     Mutex::Autolock l(mLock);
910 
911     if (surface_id != 0) {
912         ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
913     }
914     return mConsumer == nullptr;
915 }
916 
917 status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
918     Mutex::Autolock l(mLock);
919     if (consumers.size() != 1) {
920         ALOGE("%s: it's illegal to set %zu consumer surfaces!",
921                   __FUNCTION__, consumers.size());
922         return INVALID_OPERATION;
923     }
924     if (consumers[0] == nullptr) {
925         ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
926         return INVALID_OPERATION;
927     }
928 
929     if (mConsumer != nullptr) {
930         ALOGE("%s: consumer surface was already set!", __FUNCTION__);
931         return INVALID_OPERATION;
932     }
933 
934     mConsumer = consumers[0];
935     return OK;
936 }
937 
938 bool Camera3OutputStream::isConsumedByHWComposer() const {
939     uint64_t usage = 0;
940     status_t res = getEndpointUsage(&usage);
941     if (res != OK) {
942         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
943         return false;
944     }
945 
946     return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
947 }
948 
949 bool Camera3OutputStream::isConsumedByHWTexture() const {
950     uint64_t usage = 0;
951     status_t res = getEndpointUsage(&usage);
952     if (res != OK) {
953         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
954         return false;
955     }
956 
957     return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
958 }
959 
960 }; // namespace camera3
961 
962 }; // namespace android
963