/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <algorithm>
#include <ctime>
#include <fstream>

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"

#include <android-base/unique_fd.h>
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <camera/StringUtils.h>

#include <common/CameraDeviceBase.h>
#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
#include "utils/TraceHFR.h"

#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif
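// container_of (defined above when not already provided) recovers a pointer
// to an enclosing struct from a pointer to one of its members; it is used
// below to map a buffer_handle_t back to its containing ANativeWindowBuffer.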

namespace flags = com::android::internal::camera::flags;

namespace android {

namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

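// Constructor for a stream with a known consumer Surface and fixed-size
// buffers (maxSize is 0).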
Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

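// Constructor for a size-only (BLOB or RAW_OPAQUE) stream with a known
// consumer Surface; buffers are maxSize bytes rather than width x height.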
Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

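// Constructor for a deferred-consumer stream: the Surface is not known yet
// and will be supplied later via setConsumers().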
Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
        const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {
    // Deferred consumers only support the preview surface format for now.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Validation check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
              __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

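// Constructor used by subclasses that create and manage their own consumer;
// mConsumer is intentionally left unset here.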
Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
                                         uint32_t width, uint32_t height,
                                         int format,
                                         android_dataspace dataSpace,
                                         camera_stream_rotation_t rotation,
                                         const std::string& physicalCameraId,
                                         const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                         IPCTransport transport,
                                         uint64_t consumerUsage, nsecs_t timestampOffset,
                                         int setId, bool isMultiResolution,
                                         int64_t dynamicRangeProfile, int64_t streamUseCase,
                                         bool deviceTimeBaseIsRealtime, int timestampBase,
                                         int mirrorMode, int32_t colorSpace,
                                         bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses are expected to initialize mConsumer themselves
}


Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_HFR_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
            ANativeWindowBuffer* buffer, int anwReleaseFence,
            const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp, nsecs_t readoutTimestamp,
        int32_t transform, const std::vector<size_t>& surface_ids) {
    ATRACE_HFR_CALL();

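    // If this is the last handed-out buffer being returned, any buffers still
    // prefetched for batched dequeue can be handed back to the queue now.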
    if (mHandoutTotalBufferCount == 1) {
        returnPrefetchedBuffersLocked();
    }

    status_t res = returnAnyBufferLocked(buffer, timestamp, readoutTimestamp,
                                         /*output*/true, transform, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

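// HIDL HALs write a camera_jpeg_blob_t trailer at the end of a BLOB buffer,
// while the rest of the framework reads the AIDL CameraBlob layout. Rewrite
// the trailer in place so the blob ID and size are interpreted correctly;
// see http://b/229688810 for details.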
status_t Camera3OutputStream::fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence) {
    // Lock the JPEG buffer for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    // Use USAGE_SW_WRITE_RARELY since we're going to re-write the CameraBlob
    // header.
    GraphicBufferLocker gbLocker(graphicBuffer);
    status_t res =
            gbLocker.lockAsync(
                    GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
                    &mapped, fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

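    // For BLOB-format buffers, the buffer width is its size in bytes, so the
    // HIDL blob header sits at the very end of the allocation.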
    uint8_t *hidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t);
    // Check that the jpeg buffer is big enough to contain HIDL camera blob
    if (hidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit HIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }
    camera_jpeg_blob_t *hidlBlobHeader = reinterpret_cast<camera_jpeg_blob_t *>(hidlHeaderStart);

    // Check that the blob is indeed the jpeg blob id.
    if (hidlBlobHeader->jpeg_blob_id != CAMERA_JPEG_BLOB_ID) {
        ALOGE("%s, jpeg blob id %d is not correct", __FUNCTION__, hidlBlobHeader->jpeg_blob_id);
        return BAD_VALUE;
    }

    // Retrieve id and blob size
    CameraBlobId blobId = static_cast<CameraBlobId>(hidlBlobHeader->jpeg_blob_id);
    uint32_t blobSizeBytes = hidlBlobHeader->jpeg_size;

    if (blobSizeBytes > (graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t))) {
        ALOGE("%s, blobSize in HIDL jpeg blob : %d is corrupt, buffer size %" PRIu32, __FUNCTION__,
                  blobSizeBytes, graphicBuffer->getWidth());
    }

    uint8_t *aidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(CameraBlob);

    // Check that the jpeg buffer is big enough to contain AIDL camera blob
    if (aidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit AIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    if (static_cast<uint8_t*>(mapped) + blobSizeBytes > aidlHeaderStart) {
        ALOGE("%s, jpeg blob with size %d , buffer size %" PRIu32 " not large enough to fit"
                " AIDL camera blob without corrupting jpeg", __FUNCTION__, blobSizeBytes,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    // Fill in JPEG header
    CameraBlob aidlHeader = {
            .blobId = blobId,
            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
    };
    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
    graphicBuffer->unlock();
    return OK;
}

status_t Camera3OutputStream::returnBufferCheckedLocked(
            const camera_stream_buffer &buffer,
            nsecs_t timestamp,
            nsecs_t readoutTimestamp,
            [[maybe_unused]] bool output,
            int32_t transform,
            const std::vector<size_t>& surface_ids,
            /*out*/
            sp<Fence> *releaseFenceOut) {

    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    bool bufferDeferred = false;
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }
        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
                    (getDataSpace() ==
                     static_cast<android_dataspace_t>(
                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
            if (mIPCTransport == IPCTransport::HIDL) {
                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
            }
            // If this is a JPEG output, and image dump mask is set, save image to
            // disk.
            if (mImageDumpMask) {
                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
            }
        }

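        // Choose the readout timestamp when readout-time or display-sync mode
        // is in effect and a readout time is available; otherwise use the
        // start-of-exposure timestamp. mTimestampOffset shifts the result
        // into the stream's configured time base.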
        nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
                readoutTimestamp : timestamp) - mTimestampOffset;
        if (mPreviewFrameSpacer != nullptr) {
            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
                    - mTimestampOffset;
            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
                    transform, anwBuffer, anwReleaseFence);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
            bufferDeferred = true;
        } else {
            nsecs_t presentTime = mSyncToDisplay ?
                    syncTimestampToDisplayLocked(captureTime, releaseFence) : captureTime;

            setTransform(transform, true/*mayChangeMirror*/);
            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                      __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state)) {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    if (bufferDeferred) {
        mCachedOutputBufferCount++;
    }

    // Once a valid buffer has been returned to the queue, we can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    *releaseFenceOut = releaseFence;

    return res;
}

void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
    std::string lines;
    lines += fmt::sprintf("    Stream[%d]: Output\n", mId);
    lines += fmt::sprintf("      Consumer name: %s\n", (mConsumer.get() != nullptr) ?
            mConsumer->getConsumerName() : "Deferred");
    write(fd, lines.c_str(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    mDequeueBufferLatency.dump(fd,
        "      DequeueBuffer latency histogram:");
}

status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
        // If the mirroring mode is not AUTO, do not allow a transform update
        // which may change the mirroring.
        return OK;
    }

    return setTransformLocked(transform);
}

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;

    if (transform == -1) return res;

    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mTransform = transform;
    if (mState == STATE_CONFIGURED) {
        res = native_window_set_buffers_transform(mConsumer.get(),
                transform);
        if (res != OK) {
            ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                    __FUNCTION__, transform, strerror(-res), res);
        }
    }
    return res;
}

status_t Camera3OutputStream::configureQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;
    if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
        return res;
    }

    if ((res = configureConsumerQueueLocked(true /*allowPreviewRespace*/)) != OK) {
        return res;
    }

    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need to skip these cases, as a timeout would disable the non-blocking (async) mode.
    if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        if (mUseBufferManager) {
            // When the buffer manager is handling the buffer, we should have available buffers
            // in the buffer queue before we call into dequeueBuffer, because the buffer manager
            // is tracking free buffers.
            // There are, however, some consumer-side features (ImageReader::discardFreeBuffers)
            // that can discard free buffers without notifying the buffer manager. We want the
            // timeout to happen immediately here so the buffer manager can try to update its
            // internal state and allocate a buffer instead of waiting.
            mConsumer->setDequeueTimeout(0);
        } else {
            mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
        }
    }

    return OK;
}

status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewRespace) {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera_stream::width, camera_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera_stream::width, camera_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers = 0;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera_stream::max_buffers);
    if (camera_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera_stream::max_buffers);
        return INVALID_OPERATION;
    }

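    // The stream needs enough buffers for both sides at once: the consumer's
    // minimum undequeued count plus the HAL's maximum in-flight count.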
    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;

    int timestampBase = getTimestampBase();
    bool isDefaultTimeBase = (timestampBase ==
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
    if (allowPreviewRespace) {
        bool forceChoreographer = (timestampBase ==
                OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
        bool defaultToChoreographer = (isDefaultTimeBase &&
                isConsumedByHWComposer());
        bool defaultToSpacer = (isDefaultTimeBase &&
                isConsumedByHWTexture() &&
                !isConsumedByCPU() &&
                !isVideoStream());
        if (forceChoreographer || defaultToChoreographer) {
            mSyncToDisplay = true;
            // For a choreographer-synced stream, extra buffers aren't kept by
            // camera service. So no need to update mMaxCachedBufferCount.
            mTotalBufferCount += kDisplaySyncExtraBuffer;
        } else if (defaultToSpacer) {
            mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
            // For the preview frame spacer, the extra buffer is kept by camera
            // service. So update mMaxCachedBufferCount.
            mMaxCachedBufferCount = 1;
            mTotalBufferCount += mMaxCachedBufferCount;
            res = mPreviewFrameSpacer->run((std::string("PreviewSpacer-")
                    + std::to_string(mId)).c_str());
            if (res != OK) {
                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;

    if (isDeviceTimeBaseRealtime()) {
        if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
            // Default time base, but not hardware composer or video encoder
            mTimestampOffset = 0;
        } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
                timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
            mTimestampOffset = 0;
        }
        // If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
        // timestamp offset as bootTime - monotonicTime.
    } else {
        if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
            // Reverse offset for monotonicTime -> bootTime
            mTimestampOffset = -mTimestampOffset;
        } else {
            // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
            // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
            mTimestampOffset = 0;
        }
    }

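    // With the surface_ipc flag enabled, use the Surface API, which takes the
    // count of buffers the producer may dequeue; otherwise fall back to the
    // legacy native-window call, which takes the total buffer count.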
    if (flags::surface_ipc()) {
        res = mConsumer->setMaxDequeuedBufferCount(mTotalBufferCount - maxConsumerBuffers);
    } else {
        res = native_window_set_buffer_count(mConsumer.get(), mTotalBufferCount);
    }
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
     * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
     * manager setup is skipped in the code below. Note that HAL3.2 is also excluded here, as
     * some HAL3.2 devices may not support dynamic buffer registration.
     * Also, Camera3BufferManager does not support display/texture streams, as they have their
     * own buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
        StreamInfo streamInfo(
                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true, isMultiResolution());
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue; the buffer manager will take over
            // the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                  "(error %d %s), fall back to BufferQueue for buffer management!",
                  __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_HFR_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
                isMultiResolution(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid deadlock in the scenario below:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         * This thread has acquired the StreamingProcessor lock and tries to lock the
         * Camera3Stream lock.
         * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
         * This thread has acquired the Camera3Stream lock and bufferQueue lock, and tries to
         * lock the StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread has acquired the Camera3Stream
         * lock and tries to lock the bufferQueue lock.
         * This creates a circular locking dependency.
         */
        sp<Surface> consumer = mConsumer;
        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
                                   camera_stream::max_buffers) - mHandoutTotalBufferCount;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);

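        // With a batch size above 1, buffers are dequeued in groups and handed
        // out one at a time from the cached batch, amortizing per-dequeue
        // overhead (e.g., for high-frame-rate use cases).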
        size_t batchSize = mBatchSize.load();
        if (batchSize == 1) {
            sp<ANativeWindow> anw = consumer;
            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
        } else {
            std::unique_lock<std::mutex> batchLock(mBatchLock);
            res = OK;
            if (mBatchedBuffers.size() == 0) {
                if (remainingBuffers == 0) {
                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
                    return INVALID_OPERATION;
                }
                if (batchSize > remainingBuffers) {
                    batchSize = remainingBuffers;
                }
                batchLock.unlock();
                // Refill batched buffers
                std::vector<Surface::BatchBuffer> batchedBuffers;
                batchedBuffers.resize(batchSize);
                res = consumer->dequeueBuffers(&batchedBuffers);
                batchLock.lock();
                if (res != OK) {
                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                } else {
                    mBatchedBuffers = std::move(batchedBuffers);
                }
            }

            if (res == OK) {
                // Dispatch batch buffers
                *anb = mBatchedBuffers.back().buffer;
                *fenceFd = mBatchedBuffers.back().fenceFd;
                mBatchedBuffers.pop_back();
            }
        }

        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), isMultiResolution(),
                    &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
                // a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                                " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                        " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
                    removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

void Camera3OutputStream::onCachedBufferQueued() {
    Mutex::Autolock l(mLock);
    mCachedOutputBufferCount--;
    // Signal whoever is waiting for the buffer to be returned to the buffer
    // queue.
    mOutputBufferReturnedSignal.signal();
}

status_t Camera3OutputStream::disconnectLocked() {
    status_t res;

    if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
        return res;
    }

    // If stream configuration was not finished (the stream can only be in
    // STATE_IN_CONFIG or STATE_CONSTRUCTED state), we don't need to change
    // the stream state; return OK.
    if (mConsumer == nullptr) {
        return OK;
    }

    returnPrefetchedBuffersLocked();

    if (mPreviewFrameSpacer != nullptr) {
        mPreviewFrameSpacer->requestExit();
    }

    ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());

    res = native_window_api_disconnect(mConsumer.get(),
                                       NATIVE_WINDOW_API_CAMERA);
    /**
     * This is not an error. If the client's calling process dies, the window
     * will also die, and all calls to it will return DEAD_OBJECT; thus it's
     * already "disconnected"
     */
    if (res == DEAD_OBJECT) {
        ALOGW("%s: While disconnecting stream %d from native window, the"
                " native window died from under us", __FUNCTION__, mId);
    } else if (res != OK) {
        ALOGE("%s: Unable to disconnect stream %d from native window "
              "(error %d %s)",
              __FUNCTION__, mId, res, strerror(-res));
        mState = STATE_ERROR;
        return res;
    }

    // Since the device is already idle, there are no getBuffer calls to the buffer manager, so
    // unregistering the stream at this point should be safe.
    if (mUseBufferManager) {
        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
        if (res != OK) {
            ALOGE("%s: Unable to unregister stream %d from buffer manager "
                    "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
            mState = STATE_ERROR;
            return res;
        }
        // Note that, to make the prepare/teardown case work, we must not call
        // mBufferManager.clear(), as the stream is still in a usable state after this call.
        mUseBufferManager = false;
    }

    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
                                           : STATE_CONSTRUCTED;

    mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
    mDequeueBufferLatency.reset();
    return OK;
}

status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) {

    status_t res;

    if (mConsumer == nullptr) {
        // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
        *usage = mConsumerUsage;
        return OK;
    }

    res = getEndpointUsageForSurface(usage, mConsumer);

    return res;
}

void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
    if (consumerUsage == nullptr) {
        return;
    }

    // If an opaque output stream's endpoint is ImageReader, add
    // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
    // for the ZSL use case.
    // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
    //     1. GRALLOC_USAGE_HW_TEXTURE
    //     2. GRALLOC_USAGE_HW_RENDER
    //     3. GRALLOC_USAGE_HW_COMPOSER
    //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
            (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
            GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
        *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
    }
}

status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
        const sp<Surface>& surface) {
    bool internalConsumer = (mConsumer.get() != nullptr) && (mConsumer == surface);
    if (mConsumerUsageCachedValue.has_value() && flags::surface_ipc() && internalConsumer) {
        *usage = mConsumerUsageCachedValue.value();
        return OK;
    }
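    // Cache miss: query the endpoint for its usage flags, apply the ZSL
    // quirk, and remember the result for the stream's own consumer so
    // repeated queries don't have to hit the Surface again.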
1038 
1039     status_t res;
1040 
1041     res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), usage);
1042     applyZSLUsageQuirk(camera_stream::format, usage);
1043     if (internalConsumer) {
1044         mConsumerUsageCachedValue = *usage;
1045     }
1046     return res;
1047 }
1048 
isVideoStream()1049 bool Camera3OutputStream::isVideoStream() {
1050     uint64_t usage = 0;
1051     status_t res = getEndpointUsage(&usage);
1052     if (res != OK) {
1053         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1054         return false;
1055     }
1056 
1057     return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
1058 }
1059 
setBufferManager(sp<Camera3BufferManager> bufferManager)1060 status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
1061     Mutex::Autolock l(mLock);
1062     if (mState != STATE_CONSTRUCTED) {
1063         ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
1064                 __FUNCTION__);
1065         return INVALID_OPERATION;
1066     }
1067     mBufferManager = bufferManager;
1068 
1069     return OK;
1070 }
1071 
updateStream(const std::vector<sp<Surface>> &,const std::vector<OutputStreamInfo> &,const std::vector<size_t> &,KeyedVector<sp<Surface>,size_t> *)1072 status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
1073             const std::vector<OutputStreamInfo> &/*outputInfo*/,
1074             const std::vector<size_t> &/*removedSurfaceIds*/,
1075             KeyedVector<sp<Surface>, size_t> * /*outputMapo*/) {
1076     ALOGE("%s: this method is not supported!", __FUNCTION__);
1077     return INVALID_OPERATION;
1078 }
1079 
onBufferReleased()1080 void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
1081     sp<Camera3OutputStream> stream = mParent.promote();
1082     if (stream == nullptr) {
1083         ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1084         return;
1085     }
1086 
1087     Mutex::Autolock l(stream->mLock);
1088     if (!(stream->mUseBufferManager)) {
1089         return;
1090     }
1091 
1092     ALOGV("Stream %d: Buffer released", stream->getId());
1093     bool shouldFreeBuffer = false;
1094     status_t res = stream->mBufferManager->onBufferReleased(
1095         stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
1096         &shouldFreeBuffer);
1097     if (res != OK) {
1098         ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
1099                 strerror(-res), res);
1100         stream->mState = STATE_ERROR;
1101     }
1102 
1103     if (shouldFreeBuffer) {
1104         sp<GraphicBuffer> buffer;
1105         // Detach and free a buffer (when buffer goes out of scope)
1106         stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
1107         if (buffer.get() != nullptr) {
1108             stream->mBufferManager->notifyBufferRemoved(
1109                     stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
1110         }
1111     }
1112 }
1113 
onBuffersDiscarded(const std::vector<sp<GraphicBuffer>> & buffers)1114 void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
1115         const std::vector<sp<GraphicBuffer>>& buffers) {
1116     sp<Camera3OutputStream> stream = mParent.promote();
1117     if (stream == nullptr) {
1118         ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1119         return;
1120     }
1121 
1122     if (buffers.size() > 0) {
1123         Mutex::Autolock l(stream->mLock);
1124         stream->onBuffersRemovedLocked(buffers);
1125         if (stream->mUseBufferManager) {
1126             stream->mBufferManager->onBuffersRemoved(stream->getId(),
1127                     stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
1128         }
1129         ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
1130     }
1131 }
1132 
onBuffersRemovedLocked(const std::vector<sp<GraphicBuffer>> & removedBuffers)1133 void Camera3OutputStream::onBuffersRemovedLocked(
1134         const std::vector<sp<GraphicBuffer>>& removedBuffers) {
1135     sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
1136     if (callback != nullptr) {
1137         for (const auto& gb : removedBuffers) {
1138             callback->onBufferFreed(mId, gb->handle);
1139         }
1140     }
1141 }
1142 
detachBuffer(sp<GraphicBuffer> * buffer,int * fenceFd)1143 status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
1144     Mutex::Autolock l(mLock);
1145     return detachBufferLocked(buffer, fenceFd);
1146 }
1147 
detachBufferLocked(sp<GraphicBuffer> * buffer,int * fenceFd)1148 status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
1149     ALOGV("Stream %d: detachBuffer", getId());
1150     if (buffer == nullptr) {
1151         return BAD_VALUE;
1152     }
1153 
1154     sp<Fence> fence;
1155     status_t res = mConsumer->detachNextBuffer(buffer, &fence);
1156     if (res == NO_MEMORY) {
1157         // This may rarely happen, which indicates that the released buffer was freed by other
1158         // call (e.g., attachBuffer, dequeueBuffer etc.) before reaching here. We should notify the
1159         // buffer manager that this buffer has been freed. It's not fatal, but should be avoided,
1160         // therefore log a warning.
1161         *buffer = 0;
1162         ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
1163     } else if (res != OK) {
1164         // Treat other errors as abandonment
1165         if (shouldLogError(res, mState)) {
1166             ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1167         }
1168         mState = STATE_ABANDONED;
1169         return res;
1170     }
1171 
1172     if (fenceFd != nullptr) {
1173         if (fence!= 0 && fence->isValid()) {
1174             *fenceFd = fence->dup();
1175         } else {
1176             *fenceFd = -1;
1177         }
1178     }
1179 
1180     // Here we assume detachBuffer is called by buffer manager so it doesn't need to be notified
1181     checkRemovedBuffersLocked(/*notifyBufferManager*/false);
1182     return res;
1183 }
1184 
dropBuffers(bool dropping)1185 status_t Camera3OutputStream::dropBuffers(bool dropping) {
1186     Mutex::Autolock l(mLock);
1187     mDropBuffers = dropping;
1188     return OK;
1189 }
1190 
getPhysicalCameraId() const1191 const std::string& Camera3OutputStream::getPhysicalCameraId() const {
1192     Mutex::Autolock l(mLock);
1193     return physicalCameraId();
1194 }
1195 
notifyBufferReleased(ANativeWindowBuffer *)1196 status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
1197     return OK;
1198 }
1199 
isConsumerConfigurationDeferred(size_t surface_id) const1200 bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
    Mutex::Autolock l(mLock);

    if (surface_id != 0) {
        ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
    }
    return mConsumer == nullptr;
}

status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
    Mutex::Autolock l(mLock);
    if (consumers.size() != 1) {
        ALOGE("%s: it's illegal to set %zu consumer surfaces!",
                  __FUNCTION__, consumers.size());
        return INVALID_OPERATION;
    }
    if (consumers[0] == nullptr) {
        ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (mConsumer != nullptr) {
        ALOGE("%s: consumer surface was already set!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mConsumer = consumers[0];
    return OK;
}

bool Camera3OutputStream::isConsumedByHWComposer() {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting endpoint usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
}

bool Camera3OutputStream::isConsumedByHWTexture() {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting endpoint usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
}

bool Camera3OutputStream::isConsumedByCPU() {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting endpoint usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
}

void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
        ANativeWindowBuffer* anwBuffer, int fence) {
    // Derive the output file name
    std::string fileExtension = "jpg";
    char imageFileName[64];
    time_t now = time(nullptr);
    tm *localTime = localtime(&now);
    snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
            1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
            localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
            timestamp, fileExtension.c_str());

    // Lock the image for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
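    // lockAsync takes ownership of the fence FD it is given, so release() hands
    // the dup()'d descriptor off from the unique_fd instead of letting the
    // unique_fd close it.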
    status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
            fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return;
    }

    // Figure out the actual file size
    auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
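    // findJpegSize returns 0 when it cannot locate the JPEG end-of-image
    // marker; in that case fall back to writing the entire max-sized blob.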
    if (actualJpegSize == 0) {
        actualJpegSize = mMaxSize;
    }

    // Output image data to file
    std::string filePath = "/data/misc/cameraserver/";
    filePath += imageFileName;
    std::ofstream imageFile(filePath, std::ofstream::binary);
    if (!imageFile.is_open()) {
        ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
        graphicBuffer->unlock();
        return;
    }
    imageFile.write((const char*)mapped, actualJpegSize);

    graphicBuffer->unlock();
}

status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
    Mutex::Autolock l(mLock);
    if (batchSize == 0) {
        ALOGE("%s: invalid batch size 0", __FUNCTION__);
        return BAD_VALUE;
    }

    if (mUseBufferManager) {
        ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (!isVideoStream()) {
        ALOGE("%s: batch operation is not supported with a non-video stream", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (camera_stream::max_buffers < batchSize) {
        ALOGW("%s: batch size is capped by max_buffers %u", __FUNCTION__,
                camera_stream::max_buffers);
        batchSize = camera_stream::max_buffers;
    }

    size_t defaultBatchSize = 1;
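    // compare_exchange_strong succeeds only if mBatchSize still holds the
    // default value 1; on failure it loads the current batch size into
    // defaultBatchSize, which is what the error message below prints.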
    if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
        ALOGE("%s: changing batch size from %zu to %zu dynamically is not supported",
                __FUNCTION__, defaultBatchSize, batchSize);
        return INVALID_OPERATION;
    }

    return OK;
}

void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
    Mutex::Autolock l(mLock);
    mMinExpectedDuration = duration;
    mFixedFps = fixedFps;
}

void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
    Mutex::Autolock l(mLock);
    camera_stream::use_case = streamUseCase;
}

void Camera3OutputStream::returnPrefetchedBuffersLocked() {
    std::vector<Surface::BatchBuffer> batchedBuffers;

    {
        std::lock_guard<std::mutex> batchLock(mBatchLock);
        if (mBatchedBuffers.size() != 0) {
            ALOGW("%s: %zu extra prefetched buffers detected. Returning",
                   __FUNCTION__, mBatchedBuffers.size());
            batchedBuffers = std::move(mBatchedBuffers);
        }
    }

    if (batchedBuffers.size() > 0) {
        mConsumer->cancelBuffers(batchedBuffers);
    }
}

nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence) {
    nsecs_t currentTime = systemTime();
    if (!mFixedFps) {
        mLastCaptureTime = t;
        mLastPresentTime = currentTime;
        return t;
    }

    ParcelableVsyncEventData parcelableVsyncEventData;
    auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
                __FUNCTION__, mId, strerror(-res), res);
        mLastCaptureTime = t;
        mLastPresentTime = currentTime;
        return t;
    }

    const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
    nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
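    // A candidate presentation time must be at least half a vsync interval
    // later than the last presentation time; anything earlier would land in
    // the same vsync slot as the previous frame.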

    // Find the best presentation time without worrying about the previous frame's
    // presentation time if the capture interval is more than kSpacingResetIntervalNs.
    //
    // When frames are more than 50 ms apart (3 vsyncs at a 60hz refresh rate),
    // there is little risk in starting over and finding the earliest vsync to latch onto.
    // - Update the captureToPresentTime offset to be used for later frames.
    // - Example use cases:
    //   - the frame rate drops below 20 fps, or
    //   - a new streaming session starts (stopPreview followed by startPreview)
    //
    nsecs_t captureInterval = t - mLastCaptureTime;
    if (captureInterval > kSpacingResetIntervalNs) {
        for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
            const auto& timeline = vsyncEventData.frameTimelines[i];
            if (timeline.deadlineTimestamp >= currentTime &&
                    timeline.expectedPresentationTime > minPresentT) {
                nsecs_t presentT = timeline.expectedPresentationTime;
                mCaptureToPresentOffset = presentT - t;
                mLastCaptureTime = t;
                mLastPresentTime = presentT;

                // If releaseFence is available, store the fence to check its
                // signal time later.
                mRefVsyncData = vsyncEventData;
                mReferenceCaptureTime = t;
                mReferenceArrivalTime = currentTime;
                if (releaseFence->isValid()) {
                    mReferenceFrameFence = new Fence(releaseFence->dup());
                } else {
                    mFenceSignalOffset = 0;
                }

                // Move the expected presentation time back by 1/3 of the frame
                // interval to mitigate time drift. Due to drift, if we directly
                // used the expected presentation time, two consecutive expected
                // presentation times would often fall into the same VSYNC interval.
                return presentT - vsyncEventData.frameInterval/3;
            }
        }
    }

    // If there is a reference frame release fence, get the signal time and
    // update the captureToPresentOffset.
    if (mReferenceFrameFence != nullptr) {
        mFenceSignalOffset = 0;
        nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
        // Now that the fence has signaled, recalculate the offsets based on
        // the timeline which was actually latched
        if (signalTime != INT64_MAX) {
            for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
                const auto& timeline = mRefVsyncData.frameTimelines[i];
                if (timeline.deadlineTimestamp >= signalTime) {
                    nsecs_t originalOffset = mCaptureToPresentOffset;
                    mCaptureToPresentOffset = timeline.expectedPresentationTime
                            - mReferenceCaptureTime;
                    mLastPresentTime = timeline.expectedPresentationTime;
                    mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
                            signalTime - mReferenceArrivalTime : 0;

                    ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
                            " original offset %" PRId64 " new offset %" PRId64
                            " fence signal offset %" PRId64, __FUNCTION__,
                            timeline.deadlineTimestamp, signalTime, originalOffset,
                            mCaptureToPresentOffset, mFenceSignalOffset);
                    break;
                }
            }
            mReferenceFrameFence.clear();
        }
    }

    nsecs_t idealPresentT = t + mCaptureToPresentOffset;
    nsecs_t expectedPresentT = mLastPresentTime;
    nsecs_t minDiff = INT64_MAX;

    // In the fixed FPS case, when frame durations are close to multiples of the
    // display refresh rate, derive the minimum interval between presentation
    // times from the minimum expected duration. The minimum number of vsyncs is:
    // - 0 if minFrameDuration is in (0, 1.5] * vsyncInterval,
    // - 1 if minFrameDuration is in (1.5, 2.5] * vsyncInterval,
    // - and so on.
    //
    // This spaces out the displaying of the frames so that frame
    // presentations are roughly in sync with frame captures.
    int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
            vsyncEventData.frameInterval;
    if (minVsyncs < 0) minVsyncs = 0;
    nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
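    // Worked example (assuming a 60hz display, frameInterval ~= 16.67ms): a
    // 30fps stream has mMinExpectedDuration ~= 33.33ms, so minVsyncs =
    // (33.33ms - 8.33ms) / 16.67ms = 1 after integer division, and consecutive
    // presentation times are kept at least one vsync (~16.67ms) apart.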

    // In the fixed FPS case, if the frame duration deviates from multiples of
    // the display refresh rate, find the closest vsync without requiring a
    // minimum number of vsyncs.
    //
    // Example (24fps camera, 60hz refresh):
    //   capture readout:  |  t1  |  t1  | .. |  t1  | .. |  t1  | .. |  t1  |
    //   display VSYNC:      | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
    //   |  : 1 frame
    //   t1 : 41.67ms
    //   t2 : 16.67ms
    //   t1/t2 = 2.5
    //
    //   24fps is a commonly used video frame rate. Because the capture
    //   interval is 2.5 times the display refresh interval, the minVsyncs
    //   calculation falls exactly on the boundary condition. In this case,
    //   we should fall back to the basic logic of finding the closest vsync
    //   timestamp without worrying about minVsyncs.
    float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
    float ratioDeviation = std::fabs(
            captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
    bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
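    // Worked example: for the 24fps/60hz case above the ratio is 41.67 / 16.67
    // = 2.5, so ratioDeviation is 0.5 and capture is treated as deviating from
    // vsync. A 30fps stream gives a ratio of exactly 2.0, ratioDeviation 0,
    // and stays on the in-sync path.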
    bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);

    // Find the best timestamp in the vsync timelines:
    // - Use at most kMaxTimelines timelines to avoid long latency, plus an
    //   extra timeline if a display fence is used.
    // - The candidate must be closest to the ideal presentation time, and its
    //   deadline timestamp must be greater than the current time.
    // - For fixed FPS, if the capture interval doesn't deviate too much from the
    //   refresh interval, the candidate presentation time must be at least
    //   minInterval in the future compared to the last presentation time.
    // - For variable FPS, or if the capture interval deviates from the refresh
    //   interval by more than 5%, find the presentation time closest to
    //   (lastPresentationTime + captureToPresentOffset) instead.
    int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
    int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
            (int)vsyncEventData.frameTimelinesLength);
    float biasForShortDelay = 1.0f;
    for (int i = 0; i < maxTimelines; i++) {
        const auto& vsyncTime = vsyncEventData.frameTimelines[i];
        if (minVsyncs > 0) {
            // Bias towards using smaller timeline index:
            //   i = 0:                bias = 1
            //   i = maxTimelines-1:   bias = -1
            biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
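            // e.g. with maxTimelines == 3 (illustrative value), the bias is
            // 1.0 for i = 0, 0.0 for i = 1, and -1.0 for i = 2.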
        }
        if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
                vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
                ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
                 (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
                mLastPresentTime + minInterval +
                    static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
            expectedPresentT = vsyncTime.expectedPresentationTime;
            minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
        }
    }

    if (expectedPresentT == mLastPresentTime && expectedPresentT <
            vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
        // Couldn't find a reasonable presentation time. Using the last frame's
        // presentation time would cause a frame drop. The best option now
        // is to use the next VSYNC as long as the last presentation time
        // doesn't already have the maximum latency, in which case dropping the
        // buffer is preferable to increasing latency further.
        //
        // Example (60fps camera, 59.9hz refresh):
        //   capture readout:  | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
        //                      \    \    \     \    \    \    \     \   \
        //   queue to BQ:       |    |    |     |    |    |    |      |    |
        //                      \    \    \     \    \     \    \      \    \
        //   display VSYNC:      | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
        //
        //   |: 1 frame
        //   t1 : 16.67ms
        //   t2 : 16.69ms
        //
        // It takes 833 frames for the capture readout count and the display
        // VSYNC count to drift apart by 1.
        //  - At frames [0, 832], presentationTime is set to timeline[0]
        //  - At frames [833, 833*2-1], presentationTime is set to timeline[1]
        //  - At frames [833*2, 833*3-1], presentationTime is set to timeline[2]
        //  - At frame 833*3, no presentation time is found because we only
        //    search timeline[0..2].
        //  - Dropping one buffer is better than extending the presentation
        //    time further.
        //
        // However, if frame 833*2 arrives 16.67ms early (right after frame
        // 833*2-1), no presentation time can be found because
        // getLatestVsyncEventData is called too early. In that case, it's better
        // to set the presentation time by offsetting the last presentation time.
        expectedPresentT += vsyncEventData.frameInterval;
    }

    mLastCaptureTime = t;
    mLastPresentTime = expectedPresentT;

    // Move the expected presentation time back by 1/3 of the frame interval to
    // mitigate time drift. Due to drift, if we directly used the expected
    // presentation time, two consecutive expected presentation times would often
    // fall into the same VSYNC interval.
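    // e.g. at a 60hz refresh rate this returns a timestamp ~5.56ms
    // (16.67ms / 3) earlier than the chosen vsync's expected presentation time.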
    return expectedPresentT - vsyncEventData.frameInterval/3;
}

bool Camera3OutputStream::shouldLogError(status_t res) {
    Mutex::Autolock l(mLock);
    return shouldLogError(res, mState);
}

} // namespace camera3

} // namespace android