1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera3-HeicCompositeStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <linux/memfd.h>
22 #include <pthread.h>
23 #include <sys/syscall.h>
24
25 #include <android/hardware/camera/device/3.5/types.h>
26 #include <libyuv.h>
27 #include <gui/Surface.h>
28 #include <utils/Log.h>
29 #include <utils/Trace.h>
30
31 #include <mediadrm/ICrypto.h>
32 #include <media/MediaCodecBuffer.h>
33 #include <media/stagefright/foundation/ABuffer.h>
34 #include <media/stagefright/foundation/MediaDefs.h>
35 #include <media/stagefright/MediaCodecConstants.h>
36
37 #include "common/CameraDeviceBase.h"
38 #include "utils/ExifUtils.h"
39 #include "HeicEncoderInfoManager.h"
40 #include "HeicCompositeStream.h"
41
42 using android::hardware::camera::device::V3_5::CameraBlob;
43 using android::hardware::camera::device::V3_5::CameraBlobId;
44
45 namespace android {
46 namespace camera3 {
47
// Constructs a HEIC composite stream bound to a camera device. All encoder
// related fields start in a "not yet configured" state; the real values are
// filled in later by initializeCodec() / createInternalStreams().
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mUseHeic(false),            // whether a dedicated HEIC encoder is used (vs HEVC)
        mNumOutputTiles(1),
        mOutputWidth(0),
        mOutputHeight(0),
        mMaxHeicBufferSize(0),
        mGridWidth(HeicEncoderInfoManager::kGridWidth),
        mGridHeight(HeicEncoderInfoManager::kGridHeight),
        mGridRows(1),
        mGridCols(1),
        mUseGrid(false),            // whether framework-side tiling is needed
        mAppSegmentStreamId(-1),    // -1 == stream not created yet
        mAppSegmentSurfaceId(-1),
        mMainImageStreamId(-1),
        mMainImageSurfaceId(-1),
        mYuvBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mDequeuedOutputBufferCnt(0),
        mLockedAppSegmentBufferCnt(0),
        mCodecOutputCounter(0),
        mQuality(-1),               // -1 forces the first quality update through
        mGridTimestampUs(0),
        mStatusId(StatusTracker::NO_STATUS_ID) {
}
74
HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    // Drop any input timestamps / codec output buffers still queued.
    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();

    // Release the JPEG APP segment consumer/surface pair.
    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    // Release the main image consumer/surface pair.
    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}
93
isHeicCompositeStream(const sp<Surface> & surface)94 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
95 ANativeWindow *anw = surface.get();
96 status_t err;
97 int format;
98 if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
99 String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
100 err);
101 ALOGE("%s: %s", __FUNCTION__, msg.string());
102 return false;
103 }
104
105 int dataspace;
106 if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
107 String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
108 err);
109 ALOGE("%s: %s", __FUNCTION__, msg.string());
110 return false;
111 }
112
113 return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
114 }
115
// Creates the two internal HAL streams that feed the HEIC pipeline:
// 1. A BLOB stream carrying JPEG APP segments (EXIF etc.).
// 2. A main image stream: either the codec's own input surface (HW path) or a
//    CPU-readable YUV stream when framework tiling is required.
// The codec must be initialized and started here as well, since its input
// surface may be needed for the second stream.
status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // Pick HEIC vs HEVC encoder and configure it for this output size.
    status_t res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // APP segment stream: CPU-readable BLOB buffers, kMaxAcquiredAppSegment
    // of which may be locked concurrently.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
    mAppSegmentConsumer->setFrameAvailableListener(this);
    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    mAppSegmentSurface = new Surface(producer);

    mStaticInfo = device->info();

    res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
    } else {
        ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (!mUseGrid) {
        // HW path: the codec consumes camera buffers directly via its own
        // input surface.
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        // Tiling path: the framework reads YUV buffers on the CPU and copies
        // tiles into the codec's input buffers.
        BufferQueue::createBufferQueue(&producer, &consumer);
        mMainImageConsumer = new CpuConsumer(consumer, 1);
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    std::vector<int> sourceSurfaceId;
    //Use YUV_888 format if framework tiling is needed.
    int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
            rotation, id, physicalCameraId, &sourceSurfaceId);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Listen for buffer release / error notifications on both internal streams.
    mOutputSurface = consumers[0];
    res = registerCompositeStreamListener(mMainImageStreamId);
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    res = registerCompositeStreamListener(mAppSegmentStreamId);
    if (res != OK) {
        ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    initCopyRowFunction(width);
    return res;
}
209
// Tears down the composite stream: stops the processing thread, releases the
// codec, deletes the internal app segment HAL stream, and disconnects the
// client output surface.
status_t HeicCompositeStream::deleteInternalStreams() {
    // Stop and join the processing thread before releasing resources it uses.
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            res = device->deleteStream(mAppSegmentStreamId);
        }

        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    // De-register from the status tracker so this stream no longer counts
    // toward device activity.
    sp<StatusTracker> statusTracker = mStatusTracker.promote();
    if (statusTracker != nullptr && mStatusId != StatusTracker::NO_STATUS_ID) {
        statusTracker->removeComponent(mStatusId);
        mStatusId = StatusTracker::NO_STATUS_ID;
    }

    // Any frames still pending at this point were never completed; warn so
    // dropped requests are visible in the logs.
    if (mPendingInputFrames.size() > 0) {
        ALOGW("%s: mPendingInputFrames has %zu stale entries",
                __FUNCTION__, mPendingInputFrames.size());
        mPendingInputFrames.clear();
    }

    return res;
}
251
onBufferReleased(const BufferInfo & bufferInfo)252 void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
253 Mutex::Autolock l(mMutex);
254
255 if (bufferInfo.mError) return;
256
257 if (bufferInfo.mStreamId == mMainImageStreamId) {
258 mMainImageFrameNumbers.push(bufferInfo.mFrameNumber);
259 mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
260 ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
261 __FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
262 } else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
263 mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
264 ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
265 __FUNCTION__, bufferInfo.mFrameNumber, mAppSegmentFrameNumbers.size());
266 }
267 }
268
269 // We need to get the settings early to handle the case where the codec output
270 // arrives earlier than result metadata.
onBufferRequestForFrameNumber(uint64_t frameNumber,int streamId,const CameraMetadata & settings)271 void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
272 const CameraMetadata& settings) {
273 ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
274
275 Mutex::Autolock l(mMutex);
276 if (mErrorState || (streamId != getStreamId())) {
277 return;
278 }
279
280 mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
281
282 camera_metadata_ro_entry entry;
283
284 int32_t orientation = 0;
285 entry = settings.find(ANDROID_JPEG_ORIENTATION);
286 if (entry.count == 1) {
287 orientation = entry.data.i32[0];
288 }
289
290 int32_t quality = kDefaultJpegQuality;
291 entry = settings.find(ANDROID_JPEG_QUALITY);
292 if (entry.count == 1) {
293 quality = entry.data.i32[0];
294 }
295
296 mSettingsByFrameNumber[frameNumber] = {orientation, quality};
297 }
298
onFrameAvailable(const BufferItem & item)299 void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
300 if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
301 ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
302 __func__, ns2ms(item.mTimestamp));
303
304 Mutex::Autolock l(mMutex);
305 if (!mErrorState) {
306 mInputAppSegmentBuffers.push_back(item.mTimestamp);
307 mInputReadyCondition.signal();
308 }
309 } else if (item.mDataSpace == kHeifDataSpace) {
310 ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
311 __func__, ns2ms(item.mTimestamp));
312
313 Mutex::Autolock l(mMutex);
314 if (!mUseGrid) {
315 ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
316 __FUNCTION__);
317 return;
318 }
319 if (!mErrorState) {
320 mInputYuvBuffers.push_back(item.mTimestamp);
321 mInputReadyCondition.signal();
322 }
323 } else {
324 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
325 }
326 }
327
getCompositeStreamInfo(const OutputStreamInfo & streamInfo,const CameraMetadata & ch,std::vector<OutputStreamInfo> * compositeOutput)328 status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
329 const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
330 if (compositeOutput == nullptr) {
331 return BAD_VALUE;
332 }
333
334 compositeOutput->clear();
335
336 bool useGrid, useHeic;
337 bool isSizeSupported = isSizeSupportedByHeifEncoder(
338 streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
339 if (!isSizeSupported) {
340 // Size is not supported by either encoder.
341 return OK;
342 }
343
344 compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
345
346 // JPEG APPS segments Blob stream info
347 (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
348 (*compositeOutput)[0].height = 1;
349 (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
350 (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
351 (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
352
353 // YUV/IMPLEMENTATION_DEFINED stream info
354 (*compositeOutput)[1].width = streamInfo.width;
355 (*compositeOutput)[1].height = streamInfo.height;
356 (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
357 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
358 (*compositeOutput)[1].dataSpace = kHeifDataSpace;
359 (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
360 useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
361
362 return NO_ERROR;
363 }
364
// Queries the encoder info singleton for whether any HEIF-capable encoder can
// handle the given size; on success also reports which encoder type to use,
// whether framework tiling is needed, and (optionally) the stall duration and
// HEVC codec name.
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
    // Function-local static: initialized once on first use (thread-safe since
    // C++11) and shared by all composite stream instances.
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
370
isInMemoryTempFileSupported()371 bool HeicCompositeStream::isInMemoryTempFileSupported() {
372 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
373 if (memfd == -1) {
374 if (errno != ENOSYS) {
375 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
376 }
377 return false;
378 }
379 close(memfd);
380 return true;
381 }
382
onHeicOutputFrameAvailable(const CodecOutputBufferInfo & outputBufferInfo)383 void HeicCompositeStream::onHeicOutputFrameAvailable(
384 const CodecOutputBufferInfo& outputBufferInfo) {
385 Mutex::Autolock l(mMutex);
386
387 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
388 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
389 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
390
391 if (!mErrorState) {
392 if ((outputBufferInfo.size > 0) &&
393 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
394 mCodecOutputBuffers.push_back(outputBufferInfo);
395 mInputReadyCondition.signal();
396 } else {
397 ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
398 outputBufferInfo.size, outputBufferInfo.flags);
399 mCodec->releaseOutputBuffer(outputBufferInfo.index);
400 }
401 } else {
402 mCodec->releaseOutputBuffer(outputBufferInfo.index);
403 }
404 }
405
// Queues a free codec input buffer index so the processing thread can fill it
// with a YUV tile. Only valid on the framework tiling (HEVC grid) path.
void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
    Mutex::Autolock l(mMutex);

    if (!mUseGrid) {
        // On the non-tiling path the codec consumes its input surface directly,
        // so this callback should never fire.
        ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
        return;
    }

    mCodecInputBuffers.push_back(index);
    mInputReadyCondition.signal();
}
417
onHeicFormatChanged(sp<AMessage> & newFormat)418 void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
419 if (newFormat == nullptr) {
420 ALOGE("%s: newFormat must not be null!", __FUNCTION__);
421 return;
422 }
423
424 Mutex::Autolock l(mMutex);
425
426 AString mime;
427 AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
428 newFormat->findString(KEY_MIME, &mime);
429 if (mime != mimeHeic) {
430 // For HEVC codec, below keys need to be filled out or overwritten so that the
431 // muxer can handle them as HEIC output image.
432 newFormat->setString(KEY_MIME, mimeHeic);
433 newFormat->setInt32(KEY_WIDTH, mOutputWidth);
434 newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
435 if (mUseGrid) {
436 newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
437 newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
438 newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
439 newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
440 }
441 }
442 newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
443
444 int32_t gridRows, gridCols;
445 if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
446 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
447 mNumOutputTiles = gridRows * gridCols;
448 } else {
449 mNumOutputTiles = 1;
450 }
451
452 mFormat = newFormat;
453
454 ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
455 mInputReadyCondition.signal();
456 }
457
// Marks the stream as errored; other paths check mErrorState before queueing
// new work, so pending inputs stop being accepted after this point.
void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}
462
// Connects and configures the client-facing output surface (BLOB format,
// buffer count, dimensions), registers with the status tracker, and starts
// the processing thread. Idempotent once the thread is running.
status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // The final HEIC container is written into BLOB-format buffers.
    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Each output buffer must hold a full HEIC file, hence width = max byte
    // size and height = 1 (standard layout for BLOB streams).
    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    // Register with the status tracker so inflight HEIC encoding counts as
    // device activity.
    sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
    if (statusTracker != nullptr) {
        mStatusId = statusTracker->addComponent();
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}
520
insertGbp(SurfaceMap * outSurfaceMap,Vector<int32_t> * outputStreamIds,int32_t * currentStreamId)521 status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
522 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
523 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
524 (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
525 outputStreamIds->push_back(mAppSegmentStreamId);
526 }
527 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
528
529 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
530 (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
531 outputStreamIds->push_back(mMainImageStreamId);
532 }
533 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
534
535 if (currentStreamId != nullptr) {
536 *currentStreamId = mMainImageStreamId;
537 }
538
539 return NO_ERROR;
540 }
541
insertCompositeStreamIds(std::vector<int32_t> * compositeStreamIds)542 status_t HeicCompositeStream::insertCompositeStreamIds(
543 std::vector<int32_t>* compositeStreamIds /*out*/) {
544 if (compositeStreamIds == nullptr) {
545 return BAD_VALUE;
546 }
547
548 compositeStreamIds->push_back(mAppSegmentStreamId);
549 compositeStreamIds->push_back(mMainImageStreamId);
550
551 return OK;
552 }
553
onShutter(const CaptureResultExtras & resultExtras,nsecs_t timestamp)554 void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
555 Mutex::Autolock l(mMutex);
556 if (mErrorState) {
557 return;
558 }
559
560 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
561 ALOGV("%s: [%" PRId64 "]: timestamp %" PRId64 ", requestId %d", __FUNCTION__,
562 resultExtras.frameNumber, timestamp, resultExtras.requestId);
563 mSettingsByFrameNumber[resultExtras.frameNumber].shutterNotified = true;
564 mSettingsByFrameNumber[resultExtras.frameNumber].timestamp = timestamp;
565 mSettingsByFrameNumber[resultExtras.frameNumber].requestId = resultExtras.requestId;
566 mInputReadyCondition.signal();
567 }
568 }
569
compilePendingInputLocked()570 void HeicCompositeStream::compilePendingInputLocked() {
571 auto i = mSettingsByFrameNumber.begin();
572 while (i != mSettingsByFrameNumber.end()) {
573 if (i->second.shutterNotified) {
574 mPendingInputFrames[i->first].orientation = i->second.orientation;
575 mPendingInputFrames[i->first].quality = i->second.quality;
576 mPendingInputFrames[i->first].timestamp = i->second.timestamp;
577 mPendingInputFrames[i->first].requestId = i->second.requestId;
578 ALOGV("%s: [%" PRId64 "]: timestamp is %" PRId64, __FUNCTION__,
579 i->first, i->second.timestamp);
580 i = mSettingsByFrameNumber.erase(i);
581
582 // Set encoder quality if no inflight encoding
583 if (mPendingInputFrames.size() == 1) {
584 sp<StatusTracker> statusTracker = mStatusTracker.promote();
585 if (statusTracker != nullptr) {
586 statusTracker->markComponentActive(mStatusId);
587 ALOGV("%s: Mark component as active", __FUNCTION__);
588 }
589
590 int32_t newQuality = mPendingInputFrames.begin()->second.quality;
591 updateCodecQualityLocked(newQuality);
592 }
593 } else {
594 i++;
595 }
596 }
597
598 while (!mInputAppSegmentBuffers.empty() && mAppSegmentFrameNumbers.size() > 0) {
599 CpuConsumer::LockedBuffer imgBuffer;
600 auto it = mInputAppSegmentBuffers.begin();
601 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
602 if (res == NOT_ENOUGH_DATA) {
603 // Can not lock any more buffers.
604 break;
605 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
606 if (res != OK) {
607 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
608 strerror(-res), res);
609 } else {
610 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
611 " received buffer with time stamp: %" PRId64, __FUNCTION__,
612 *it, imgBuffer.timestamp);
613 mAppSegmentConsumer->unlockBuffer(imgBuffer);
614 }
615 mPendingInputFrames[*it].error = true;
616 mInputAppSegmentBuffers.erase(it);
617 continue;
618 }
619
620 if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
621 ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
622 mAppSegmentFrameNumbers.front());
623 mInputAppSegmentBuffers.erase(it);
624 mAppSegmentFrameNumbers.pop();
625 continue;
626 }
627
628 int64_t frameNumber = mAppSegmentFrameNumbers.front();
629 // If mPendingInputFrames doesn't contain the expected frame number, the captured
630 // input app segment frame must have been dropped via a buffer error. Simply
631 // return the buffer to the buffer queue.
632 if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
633 (mPendingInputFrames[frameNumber].error)) {
634 mAppSegmentConsumer->unlockBuffer(imgBuffer);
635 } else {
636 mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
637 mLockedAppSegmentBufferCnt++;
638 }
639 mInputAppSegmentBuffers.erase(it);
640 mAppSegmentFrameNumbers.pop();
641 }
642
643 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired && mMainImageFrameNumbers.size() > 0) {
644 CpuConsumer::LockedBuffer imgBuffer;
645 auto it = mInputYuvBuffers.begin();
646 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
647 if (res == NOT_ENOUGH_DATA) {
648 // Can not lock any more buffers.
649 break;
650 } else if (res != OK) {
651 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
652 strerror(-res), res);
653 mPendingInputFrames[*it].error = true;
654 mInputYuvBuffers.erase(it);
655 continue;
656 } else if (*it != imgBuffer.timestamp) {
657 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
658 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
659 mPendingInputFrames[*it].error = true;
660 mInputYuvBuffers.erase(it);
661 continue;
662 }
663
664 if (mPendingInputFrames.find(mMainImageFrameNumbers.front()) == mPendingInputFrames.end()) {
665 ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
666 mMainImageFrameNumbers.front());
667 mInputYuvBuffers.erase(it);
668 mMainImageFrameNumbers.pop();
669 continue;
670 }
671
672 int64_t frameNumber = mMainImageFrameNumbers.front();
673 // If mPendingInputFrames doesn't contain the expected frame number, the captured
674 // input main image must have been dropped via a buffer error. Simply
675 // return the buffer to the buffer queue.
676 if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
677 (mPendingInputFrames[frameNumber].error)) {
678 mMainImageConsumer->unlockBuffer(imgBuffer);
679 } else {
680 mPendingInputFrames[frameNumber].yuvBuffer = imgBuffer;
681 mYuvBufferAcquired = true;
682 }
683 mInputYuvBuffers.erase(it);
684 mMainImageFrameNumbers.pop();
685 }
686
687 while (!mCodecOutputBuffers.empty()) {
688 auto it = mCodecOutputBuffers.begin();
689 // Assume encoder input to output is FIFO, use a queue to look up
690 // frameNumber when handling codec outputs.
691 int64_t bufferFrameNumber = -1;
692 if (mCodecOutputBufferFrameNumbers.empty()) {
693 ALOGV("%s: Failed to find buffer frameNumber for codec output buffer!", __FUNCTION__);
694 break;
695 } else {
696 // Direct mapping between camera frame number and codec timestamp (in us).
697 bufferFrameNumber = mCodecOutputBufferFrameNumbers.front();
698 mCodecOutputCounter++;
699 if (mCodecOutputCounter == mNumOutputTiles) {
700 mCodecOutputBufferFrameNumbers.pop();
701 mCodecOutputCounter = 0;
702 }
703
704 mPendingInputFrames[bufferFrameNumber].codecOutputBuffers.push_back(*it);
705 ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (frameNumber %" PRId64 ")",
706 __FUNCTION__, bufferFrameNumber, it->timeUs);
707 }
708 mCodecOutputBuffers.erase(it);
709 }
710
711 while (!mCaptureResults.empty()) {
712 auto it = mCaptureResults.begin();
713 // Negative frame number indicates that something went wrong during the capture result
714 // collection process.
715 int64_t frameNumber = std::get<0>(it->second);
716 if (it->first >= 0 &&
717 mPendingInputFrames.find(frameNumber) != mPendingInputFrames.end()) {
718 if (mPendingInputFrames[frameNumber].timestamp == it->first) {
719 mPendingInputFrames[frameNumber].result =
720 std::make_unique<CameraMetadata>(std::get<1>(it->second));
721 } else {
722 ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
723 "shutter and capture result! before: %" PRId64 ", after: %" PRId64,
724 __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
725 it->first);
726 }
727 }
728 mCaptureResults.erase(it);
729 }
730
731 // mErrorFrameNumbers stores frame number of dropped buffers.
732 auto it = mErrorFrameNumbers.begin();
733 while (it != mErrorFrameNumbers.end()) {
734 if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
735 mPendingInputFrames[*it].error = true;
736 } else {
737 //Error callback is guaranteed to arrive after shutter notify, which
738 //results in mPendingInputFrames being populated.
739 ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
740 *it);
741 }
742 it = mErrorFrameNumbers.erase(it);
743 }
744
745 // mExifErrorFrameNumbers stores the frame number of dropped APP_SEGMENT buffers
746 it = mExifErrorFrameNumbers.begin();
747 while (it != mExifErrorFrameNumbers.end()) {
748 if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
749 mPendingInputFrames[*it].exifError = true;
750 }
751 it = mExifErrorFrameNumbers.erase(it);
752 }
753
754 // Distribute codec input buffers to be filled out from YUV output
755 for (auto it = mPendingInputFrames.begin();
756 it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
757 InputFrame& inputFrame(it->second);
758 if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
759 // Available input tiles that are required for the current input
760 // image.
761 size_t newInputTiles = std::min(mCodecInputBuffers.size(),
762 mGridRows * mGridCols - inputFrame.codecInputCounter);
763 for (size_t i = 0; i < newInputTiles; i++) {
764 CodecInputBufferInfo inputInfo =
765 { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
766 inputFrame.codecInputBuffers.push_back(inputInfo);
767
768 mCodecInputBuffers.erase(mCodecInputBuffers.begin());
769 inputFrame.codecInputCounter++;
770 }
771 break;
772 }
773 }
774 }
775
getNextReadyInputLocked(int64_t * frameNumber)776 bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
777 if (frameNumber == nullptr) {
778 return false;
779 }
780
781 bool newInputAvailable = false;
782 for (auto& it : mPendingInputFrames) {
783 // New input is considered to be available only if:
784 // 1. input buffers are ready, or
785 // 2. App segment and muxer is created, or
786 // 3. A codec output tile is ready, and an output buffer is available.
787 // This makes sure that muxer gets created only when an output tile is
788 // generated, because right now we only handle 1 HEIC output buffer at a
789 // time (max dequeued buffer count is 1).
790 bool appSegmentReady =
791 (it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
792 !it.second.appSegmentWritten && it.second.result != nullptr &&
793 it.second.muxer != nullptr;
794 bool codecOutputReady = !it.second.codecOutputBuffers.empty();
795 bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
796 (!it.second.codecInputBuffers.empty());
797 bool hasOutputBuffer = it.second.muxer != nullptr ||
798 (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
799 if ((!it.second.error) &&
800 (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
801 *frameNumber = it.first;
802 if (it.second.format == nullptr && mFormat != nullptr) {
803 it.second.format = mFormat->dup();
804 }
805 newInputAvailable = true;
806 break;
807 }
808 }
809
810 return newInputAvailable;
811 }
812
getNextFailingInputLocked()813 int64_t HeicCompositeStream::getNextFailingInputLocked() {
814 int64_t res = -1;
815
816 for (const auto& it : mPendingInputFrames) {
817 if (it.second.error) {
818 res = it.first;
819 break;
820 }
821 }
822
823 return res;
824 }
825
// Advances one pending input frame through the HEIC pipeline as far as its
// currently-available inputs allow: copies YUV tiles into codec input
// buffers, creates/starts the muxer once an output buffer is available,
// writes the JPEG APP segment, writes encoded tile bitstreams to the muxer,
// and finalizes the output when all tiles and the app segment are written.
//
// Returns OK on success (including "nothing to do yet"), or the error code
// from whichever processing step failed.
status_t HeicCompositeStream::processInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    // These readiness predicates mirror the ones getNextReadyInputLocked()
    // used to select this frame.
    bool appSegmentReady =
            (inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    // A non-null muxer implies this frame already holds a dequeued output
    // buffer; otherwise we may only proceed if the surface has capacity left.
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
            appSegmentReady, codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt,
            inputFrame.timestamp);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Nothing left to write downstream this round.
    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // All tiles are muxed: either finalize the output (app segment already
    // written) or flag an error if too many frames are stuck waiting for
    // their app segment, which would starve the app segment consumer.
    if (inputFrame.pendingOutputTiles == 0) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(frameNumber, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
            ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
                    kMaxAcquiredAppSegment);
            return INVALID_OPERATION;
        }
    }

    return res;
}
909
// Dequeues an output buffer from the consumer surface, creates an anonymous
// in-memory file (memfd) to serve as the muxer's destination, and creates and
// starts a MediaMuxer in HEIF mode for this frame.
//
// On success, inputFrame owns: anb (the dequeued output buffer and its fence),
// fileFd (the memfd), a started muxer with one track added, and
// pendingOutputTiles initialized to mNumOutputTiles.
status_t HeicCompositeStream::startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    // Tracked so getNextReadyInputLocked()/processInputFrame() can throttle
    // how many output buffers are held at once.
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and frame number to uniquely
    // identify this image's backing file.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << frameNumber;
    // memfd: anonymous, memory-backed fd — no filesystem entry to clean up.
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // One track carries both the codec bitstream and the muxer-data (EXIF).
    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            frameNumber);
    return OK;
}
965
processAppSegment(int64_t frameNumber,InputFrame & inputFrame)966 status_t HeicCompositeStream::processAppSegment(int64_t frameNumber, InputFrame &inputFrame) {
967 size_t app1Size = 0;
968 size_t appSegmentSize = 0;
969 if (!inputFrame.exifError) {
970 appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
971 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
972 &app1Size);
973 if (appSegmentSize == 0) {
974 ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
975 return NO_INIT;
976 }
977 }
978
979 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
980 auto exifRes = inputFrame.exifError ?
981 exifUtils->initializeEmpty() :
982 exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
983 if (!exifRes) {
984 ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
985 return BAD_VALUE;
986 }
987 exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
988 mOutputWidth, mOutputHeight);
989 if (!exifRes) {
990 ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
991 return BAD_VALUE;
992 }
993 exifRes = exifUtils->setOrientation(inputFrame.orientation);
994 if (!exifRes) {
995 ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
996 return BAD_VALUE;
997 }
998 exifRes = exifUtils->generateApp1();
999 if (!exifRes) {
1000 ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
1001 return BAD_VALUE;
1002 }
1003
1004 unsigned int newApp1Length = exifUtils->getApp1Length();
1005 const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
1006
1007 //Assemble the APP1 marker buffer required by MediaCodec
1008 uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
1009 kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
1010 kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
1011 size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
1012 appSegmentSize - app1Size + newApp1Length;
1013 uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
1014 memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
1015 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
1016 if (appSegmentSize - app1Size > 0) {
1017 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
1018 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
1019 }
1020
1021 sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
1022 auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
1023 inputFrame.timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
1024 delete[] appSegmentBuffer;
1025
1026 if (res != OK) {
1027 ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
1028 __FUNCTION__, strerror(-res), res);
1029 return res;
1030 }
1031
1032 ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
1033 __FUNCTION__, frameNumber, appSegmentSize, inputFrame.appSegmentBuffer.width,
1034 inputFrame.appSegmentBuffer.height, app1Size);
1035
1036 inputFrame.appSegmentWritten = true;
1037 // Release the buffer now so any pending input app segments can be processed
1038 mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
1039 inputFrame.appSegmentBuffer.data = nullptr;
1040 inputFrame.exifError = false;
1041 mLockedAppSegmentBufferCnt--;
1042
1043 return OK;
1044 }
1045
// Copies each queued YUV tile from the camera's locked buffer into the
// corresponding codec input buffer and queues it for encoding. Tiles are
// addressed row-major: tileIndex = tileY * mGridCols + tileX. Right/bottom
// edge tiles may be narrower/shorter than a full grid cell.
status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
        sp<MediaCodecBuffer> buffer;
        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
        if (res != OK) {
            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // Copy one tile from source to destination.
        size_t tileX = inputBuffer.tileIndex % mGridCols;
        size_t tileY = inputBuffer.tileIndex / mGridCols;
        size_t top = mGridHeight * tileY;
        size_t left = mGridWidth * tileX;
        // Clip the last column/row of tiles to the actual output dimensions.
        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
                mOutputWidth - tileX * mGridWidth : mGridWidth;
        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                mOutputHeight - tileY * mGridHeight : mGridHeight;
        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
                inputBuffer.timeUs);

        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
        if (res != OK) {
            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
        if (res != OK) {
            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // All descriptors consumed; the codec now owns the queued buffers.
    inputFrame.codecInputBuffers.clear();
    return OK;
}
1088
processOneCodecOutputFrame(int64_t frameNumber,InputFrame & inputFrame)1089 status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
1090 InputFrame &inputFrame) {
1091 auto it = inputFrame.codecOutputBuffers.begin();
1092 sp<MediaCodecBuffer> buffer;
1093 status_t res = mCodec->getOutputBuffer(it->index, &buffer);
1094 if (res != OK) {
1095 ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
1096 __FUNCTION__, it->index, strerror(-res), res);
1097 return res;
1098 }
1099 if (buffer == nullptr) {
1100 ALOGE("%s: Invalid Heic codec output buffer at index %d",
1101 __FUNCTION__, it->index);
1102 return BAD_VALUE;
1103 }
1104
1105 sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
1106 res = inputFrame.muxer->writeSampleData(
1107 aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
1108 if (res != OK) {
1109 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1110 __FUNCTION__, it->index, strerror(-res), res);
1111 return res;
1112 }
1113
1114 mCodec->releaseOutputBuffer(it->index);
1115 if (inputFrame.pendingOutputTiles == 0) {
1116 ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
1117 } else {
1118 inputFrame.pendingOutputTiles--;
1119 }
1120
1121 inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
1122
1123 ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
1124 __FUNCTION__, frameNumber, it->index);
1125 return OK;
1126 }
1127
// Finalizes one fully-encoded frame: stops the muxer, copies the muxed HEIF
// file from the memfd into the dequeued graphic buffer, writes the CameraBlob
// transport header at the tail of the buffer (mirroring the JPEG BLOB
// convention), and queues the buffer to the output surface.
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    // lockAsync consumes the dequeue fence before granting CPU write access.
    // NOTE(review): the buffer is never explicitly unlocked before
    // queueBuffer(), and the early error returns below also leave it locked —
    // confirm whether an unlock() is required here.
    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Determine the muxed file size and make sure it fits ahead of the
    // trailing blob header.
    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    // The header occupies the last sizeof(CameraBlob) bytes of the buffer so
    // consumers can locate it at a fixed offset from the end.
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
    blobHeader->blobSize = fSize;

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Ownership of the buffer has passed to the consumer.
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, frameNumber);
    ATRACE_ASYNC_END("HEIC capture", frameNumber);
    return OK;
}
1187
1188
releaseInputFrameLocked(int64_t frameNumber,InputFrame * inputFrame)1189 void HeicCompositeStream::releaseInputFrameLocked(int64_t frameNumber,
1190 InputFrame *inputFrame /*out*/) {
1191 if (inputFrame == nullptr) {
1192 return;
1193 }
1194
1195 if (inputFrame->appSegmentBuffer.data != nullptr) {
1196 mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1197 inputFrame->appSegmentBuffer.data = nullptr;
1198 }
1199
1200 while (!inputFrame->codecOutputBuffers.empty()) {
1201 auto it = inputFrame->codecOutputBuffers.begin();
1202 ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1203 mCodec->releaseOutputBuffer(it->index);
1204 inputFrame->codecOutputBuffers.erase(it);
1205 }
1206
1207 if (inputFrame->yuvBuffer.data != nullptr) {
1208 mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1209 inputFrame->yuvBuffer.data = nullptr;
1210 mYuvBufferAcquired = false;
1211 }
1212
1213 while (!inputFrame->codecInputBuffers.empty()) {
1214 auto it = inputFrame->codecInputBuffers.begin();
1215 inputFrame->codecInputBuffers.erase(it);
1216 }
1217
1218 if (inputFrame->error || mErrorState) {
1219 ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
1220 notifyError(frameNumber, inputFrame->requestId);
1221 }
1222
1223 if (inputFrame->fileFd >= 0) {
1224 close(inputFrame->fileFd);
1225 inputFrame->fileFd = -1;
1226 }
1227
1228 if (inputFrame->anb != nullptr) {
1229 sp<ANativeWindow> outputANW = mOutputSurface;
1230 outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1231 inputFrame->anb = nullptr;
1232
1233 mDequeuedOutputBufferCnt--;
1234 }
1235 }
1236
// Erases all pending input frames that have either failed or fully completed
// (app segment written and every tile muxed), releasing their resources.
// After any removal, pushes the quality of the next pending frame to the
// codec, or marks the tracker idle when no frames remain.
// Caller must hold mMutex.
void HeicCompositeStream::releaseInputFramesLocked() {
    auto it = mPendingInputFrames.begin();
    bool inputFrameDone = false;
    while (it != mPendingInputFrames.end()) {
        auto& inputFrame = it->second;
        if (inputFrame.error ||
                (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
            releaseInputFrameLocked(it->first, &inputFrame);
            // erase() returns the next valid iterator.
            it = mPendingInputFrames.erase(it);
            inputFrameDone = true;
        } else {
            it++;
        }
    }

    // Update codec quality based on first upcoming input frame.
    // Note that when encoding is in surface mode, currently there is no
    // way for camera service to synchronize quality setting on a per-frame
    // basis: we don't get notification when codec is ready to consume a new
    // input frame. So we update codec quality on a best-effort basis.
    if (inputFrameDone) {
        auto firstPendingFrame = mPendingInputFrames.begin();
        if (firstPendingFrame != mPendingInputFrames.end()) {
            updateCodecQualityLocked(firstPendingFrame->second.quality);
        } else {
            markTrackerIdle();
        }
    }
}
1266
// Creates and configures the image encoder (HEIC encoder, or HEVC encoder
// with tiling emulated via grid math) for the given output size, along with
// the looper threads used for codec operation and asynchronous callbacks.
//
// @param width        Output image width in pixels.
// @param height       Output image height in pixels.
// @param cameraDevice Queried for static info to size the APP segment buffer.
// @return OK on success; BAD_VALUE if no encoder supports the size;
//         NO_INIT/NO_MEMORY or codec errors otherwise.
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    bool useGrid = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesnt' support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    if (mUseHeic) {
        mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    } else {
        // Use the specific HEVC component found by the size query above.
        mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    }
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,   // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    // Put the codec in asynchronous mode: all input/output/format events
    // arrive through kWhatCallbackNotify on the callback looper.
    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        // Round up so partial edge tiles still get a grid cell.
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        // Single-tile encode: the whole image is one "grid" cell.
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    // With grid, the codec sees one tile-sized frame per tile; without, it
    // sees the full image once.
    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    // Grid mode copies YUV tiles by CPU; non-grid uses a surface input.
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    // Worst case: full YUV420 image (3/2 bytes per pixel) plus APP segments.
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}
1391
// Tears down the encoder and its looper threads. The codec is stopped and
// released before its loopers are stopped, then the cached callback message
// and output format are dropped.
void HeicCompositeStream::deinitCodec() {
    ALOGV("%s", __FUNCTION__);
    if (mCodec != nullptr) {
        mCodec->stop();
        mCodec->release();
        mCodec.clear();
    }

    if (mCodecLooper != nullptr) {
        mCodecLooper->stop();
        mCodecLooper.clear();
    }

    if (mCallbackLooper != nullptr) {
        mCallbackLooper->stop();
        mCallbackLooper.clear();
    }

    mAsyncNotify.clear();
    mFormat.clear();
}
1413
1414 // Return the size of the complete list of app segment, 0 indicates failure
findAppSegmentsSize(const uint8_t * appSegmentBuffer,size_t maxSize,size_t * app1SegmentSize)1415 size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
1416 size_t maxSize, size_t *app1SegmentSize) {
1417 if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
1418 ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
1419 __FUNCTION__, appSegmentBuffer, app1SegmentSize);
1420 return 0;
1421 }
1422
1423 size_t expectedSize = 0;
1424 // First check for EXIF transport header at the end of the buffer
1425 const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
1426 const struct CameraBlob *blob = (const struct CameraBlob*)(header);
1427 if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
1428 ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
1429 return 0;
1430 }
1431
1432 expectedSize = blob->blobSize;
1433 if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
1434 ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
1435 return 0;
1436 }
1437
1438 uint32_t totalSize = 0;
1439
1440 // Verify APP1 marker (mandatory)
1441 uint8_t app1Marker[] = {0xFF, 0xE1};
1442 if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
1443 ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
1444 appSegmentBuffer[0], appSegmentBuffer[1]);
1445 return 0;
1446 }
1447 totalSize += sizeof(app1Marker);
1448
1449 uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1450 appSegmentBuffer[totalSize+1];
1451 totalSize += app1Size;
1452
1453 ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
1454 __FUNCTION__, expectedSize, app1Size);
1455 while (totalSize < expectedSize) {
1456 if (appSegmentBuffer[totalSize] != 0xFF ||
1457 appSegmentBuffer[totalSize+1] <= 0xE1 ||
1458 appSegmentBuffer[totalSize+1] > 0xEF) {
1459 // Invalid APPn marker
1460 ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
1461 appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
1462 return 0;
1463 }
1464 totalSize += 2;
1465
1466 uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
1467 appSegmentBuffer[totalSize+1];
1468 totalSize += appnSize;
1469 }
1470
1471 if (totalSize != expectedSize) {
1472 ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
1473 __FUNCTION__, totalSize, expectedSize);
1474 return 0;
1475 }
1476
1477 *app1SegmentSize = app1Size + sizeof(app1Marker);
1478 return expectedSize;
1479 }
1480
// Copies one tile (top/left/width/height in luma pixels) from the camera's
// locked YUV buffer into a codec input buffer, honoring the codec's
// MediaImage2 plane layout. Three paths: direct semiplanar row copy, direct
// planar row copy, or a generic per-pixel fallback for mismatched layouts.
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    // Only 8-bit 3-plane YUV layouts are handled by the copy paths below.
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }

    // U is Cb, V is Cr
    // Classify the codec's chroma layout from the plane offsets/increments:
    // interleaved UV/VU at a 1-byte offset with colInc 2 => semiplanar;
    // fully separated planes with colInc 1 => planar.
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    // Chroma planes are subsampled 2x2 (per the /2 arithmetic below), hence
    // the halved row/column ranges.
    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semiplannar
        // The chrome plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semiplannar and plannar, or when UV orders are
        // different.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}
1596
// Selects an optimized libyuv row-copy routine for the given row width,
// starting from the portable C fallback. Later #if blocks overwrite earlier
// selections, so the last supported feature in this order wins:
// C -> SSE2 -> AVX -> ERMS -> NEON -> MIPS.
void HeicCompositeStream::initCopyRowFunction(int32_t width)
{
    using namespace libyuv;

    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        // Aligned variants require the width to be a multiple of the SIMD width.
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    if (TestCpuFlag(kCpuHasERMS)) {
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}
1628
calcAppSegmentMaxSize(const CameraMetadata & info)1629 size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
1630 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
1631 size_t maxAppsSegment = 1;
1632 if (entry.count > 0) {
1633 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
1634 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
1635 }
1636 return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
1637 }
1638
updateCodecQualityLocked(int32_t quality)1639 void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
1640 if (quality != mQuality) {
1641 sp<AMessage> qualityParams = new AMessage;
1642 qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
1643 status_t res = mCodec->setParameters(qualityParams);
1644 if (res != OK) {
1645 ALOGE("%s: Failed to set codec quality: %s (%d)",
1646 __FUNCTION__, strerror(-res), res);
1647 } else {
1648 mQuality = quality;
1649 }
1650 }
1651 }
1652
// Processing thread body: gathers newly arrived inputs, waits for a frame to
// become ready (or to fail), processes the ready frame, and releases
// completed/failed frames. Returns true to run again, false to stop the
// thread (error state or fatal wait failure).
bool HeicCompositeStream::threadLoop() {
    int64_t frameNumber = -1;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }


        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&frameNumber);

            if (!newInputAvailable) {
                // No ready frame: dispose of one failed frame, if any, before
                // going back to sleep.
                auto failingFrameNumber = getNextFailingInputLocked();
                if (failingFrameNumber >= 0) {
                    releaseInputFrameLocked(failingFrameNumber,
                            &mPendingInputFrames[failingFrameNumber]);

                    // It's okay to remove the entry from mPendingInputFrames
                    // because:
                    // 1. Only one internal stream (main input) is critical in
                    // backing the output stream.
                    // 2. If captureResult/appSegment arrives after the entry is
                    // removed, they are simply skipped.
                    mPendingInputFrames.erase(failingFrameNumber);
                    if (mPendingInputFrames.size() == 0) {
                        markTrackerIdle();
                    }
                    return true;
                }

                // Sleep until new input arrives (mInputReadyCondition is
                // signaled by the callbacks) or the wait times out; a timeout
                // just re-runs the loop.
                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    // Processed outside the lock — presumably safe because only this thread
    // consumes pending frames once selected; TODO(review) confirm.
    auto res = processInputFrame(frameNumber, mPendingInputFrames[frameNumber]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ", frameNumber: %"
                PRId64 ": %s (%d)", __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
                frameNumber, strerror(-res), res);
        mPendingInputFrames[frameNumber].error = true;
    }

    // Sweep out frames that just completed or failed.
    releaseInputFramesLocked();

    return true;
}
1716
flagAnExifErrorFrameNumber(int64_t frameNumber)1717 void HeicCompositeStream::flagAnExifErrorFrameNumber(int64_t frameNumber) {
1718 Mutex::Autolock l(mMutex);
1719 mExifErrorFrameNumbers.emplace(frameNumber);
1720 mInputReadyCondition.signal();
1721 }
1722
onStreamBufferError(const CaptureResultExtras & resultExtras)1723 bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
1724 bool res = false;
1725 int64_t frameNumber = resultExtras.frameNumber;
1726
1727 // Buffer errors concerning internal composite streams should not be directly visible to
1728 // camera clients. They must only receive a single buffer error with the public composite
1729 // stream id.
1730 if (resultExtras.errorStreamId == mAppSegmentStreamId) {
1731 ALOGV("%s: APP_SEGMENT frameNumber: %" PRId64, __FUNCTION__, frameNumber);
1732 flagAnExifErrorFrameNumber(frameNumber);
1733 res = true;
1734 } else if (resultExtras.errorStreamId == mMainImageStreamId) {
1735 ALOGV("%s: YUV frameNumber: %" PRId64, __FUNCTION__, frameNumber);
1736 flagAnErrorFrameNumber(frameNumber);
1737 res = true;
1738 }
1739
1740 return res;
1741 }
1742
onResultError(const CaptureResultExtras & resultExtras)1743 void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
1744 // For result error, since the APPS_SEGMENT buffer already contains EXIF,
1745 // simply skip using the capture result metadata to override EXIF.
1746 Mutex::Autolock l(mMutex);
1747
1748 int64_t timestamp = -1;
1749 for (const auto& fn : mSettingsByFrameNumber) {
1750 if (fn.first == resultExtras.frameNumber) {
1751 timestamp = fn.second.timestamp;
1752 break;
1753 }
1754 }
1755 if (timestamp == -1) {
1756 for (const auto& inputFrame : mPendingInputFrames) {
1757 if (inputFrame.first == resultExtras.frameNumber) {
1758 timestamp = inputFrame.second.timestamp;
1759 break;
1760 }
1761 }
1762 }
1763
1764 if (timestamp == -1) {
1765 ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
1766 return;
1767 }
1768
1769 mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
1770 ALOGV("%s: timestamp %" PRId64 ", frameNumber %" PRId64, __FUNCTION__,
1771 timestamp, resultExtras.frameNumber);
1772 mInputReadyCondition.signal();
1773 }
1774
onRequestError(const CaptureResultExtras & resultExtras)1775 void HeicCompositeStream::onRequestError(const CaptureResultExtras& resultExtras) {
1776 auto frameNumber = resultExtras.frameNumber;
1777 ALOGV("%s: frameNumber: %" PRId64, __FUNCTION__, frameNumber);
1778 Mutex::Autolock l(mMutex);
1779 auto numRequests = mSettingsByFrameNumber.erase(frameNumber);
1780 if (numRequests == 0) {
1781 // Pending request has been populated into mPendingInputFrames
1782 mErrorFrameNumbers.emplace(frameNumber);
1783 mInputReadyCondition.signal();
1784 } else {
1785 // REQUEST_ERROR was received without onShutter.
1786 }
1787 }
1788
markTrackerIdle()1789 void HeicCompositeStream::markTrackerIdle() {
1790 sp<StatusTracker> statusTracker = mStatusTracker.promote();
1791 if (statusTracker != nullptr) {
1792 statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
1793 ALOGV("%s: Mark component as idle", __FUNCTION__);
1794 }
1795 }
1796
onMessageReceived(const sp<AMessage> & msg)1797 void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
1798 sp<HeicCompositeStream> parent = mParent.promote();
1799 if (parent == nullptr) return;
1800
1801 switch (msg->what()) {
1802 case kWhatCallbackNotify: {
1803 int32_t cbID;
1804 if (!msg->findInt32("callbackID", &cbID)) {
1805 ALOGE("kWhatCallbackNotify: callbackID is expected.");
1806 break;
1807 }
1808
1809 ALOGV("kWhatCallbackNotify: cbID = %d", cbID);
1810
1811 switch (cbID) {
1812 case MediaCodec::CB_INPUT_AVAILABLE: {
1813 int32_t index;
1814 if (!msg->findInt32("index", &index)) {
1815 ALOGE("CB_INPUT_AVAILABLE: index is expected.");
1816 break;
1817 }
1818 parent->onHeicInputFrameAvailable(index);
1819 break;
1820 }
1821
1822 case MediaCodec::CB_OUTPUT_AVAILABLE: {
1823 int32_t index;
1824 size_t offset;
1825 size_t size;
1826 int64_t timeUs;
1827 int32_t flags;
1828
1829 if (!msg->findInt32("index", &index)) {
1830 ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
1831 break;
1832 }
1833 if (!msg->findSize("offset", &offset)) {
1834 ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
1835 break;
1836 }
1837 if (!msg->findSize("size", &size)) {
1838 ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
1839 break;
1840 }
1841 if (!msg->findInt64("timeUs", &timeUs)) {
1842 ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
1843 break;
1844 }
1845 if (!msg->findInt32("flags", &flags)) {
1846 ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
1847 break;
1848 }
1849
1850 CodecOutputBufferInfo bufferInfo = {
1851 index,
1852 (int32_t)offset,
1853 (int32_t)size,
1854 timeUs,
1855 (uint32_t)flags};
1856
1857 parent->onHeicOutputFrameAvailable(bufferInfo);
1858 break;
1859 }
1860
1861 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
1862 sp<AMessage> format;
1863 if (!msg->findMessage("format", &format)) {
1864 ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
1865 break;
1866 }
1867 // Here format is MediaCodec's internal copy of output format.
1868 // Make a copy since onHeicFormatChanged() might modify it.
1869 sp<AMessage> formatCopy;
1870 if (format != nullptr) {
1871 formatCopy = format->dup();
1872 }
1873 parent->onHeicFormatChanged(formatCopy);
1874 break;
1875 }
1876
1877 case MediaCodec::CB_ERROR: {
1878 status_t err;
1879 int32_t actionCode;
1880 AString detail;
1881 if (!msg->findInt32("err", &err)) {
1882 ALOGE("CB_ERROR: err is expected.");
1883 break;
1884 }
1885 if (!msg->findInt32("action", &actionCode)) {
1886 ALOGE("CB_ERROR: action is expected.");
1887 break;
1888 }
1889 msg->findString("detail", &detail);
1890 ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
1891 err, actionCode, detail.c_str());
1892
1893 parent->onHeicCodecError();
1894 break;
1895 }
1896
1897 default: {
1898 ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
1899 break;
1900 }
1901 }
1902 break;
1903 }
1904
1905 default:
1906 ALOGE("shouldn't be here");
1907 break;
1908 }
1909 }
1910
1911 }; // namespace camera3
1912 }; // namespace android
1913