/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#ifdef MPEG4
#define LOG_TAG "C2SoftMpeg4Enc"
#else
#define LOG_TAG "C2SoftH263Enc"
#endif
#include <log/log.h>

#include <inttypes.h>

#include <media/hardware/VideoAPI.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <utils/misc.h>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
#include <util/C2InterfaceHelper.h>

#include "C2SoftMpeg4Enc.h"
#include "mp4enc_api.h"

namespace android {

namespace {

#ifdef MPEG4
constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.encoder";
const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_MPEG4;
#else
constexpr char COMPONENT_NAME[] = "c2.android.h263.encoder";
const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_H263;
#endif

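// VBV (video buffering verifier) delay used by the rate control; ProfileLevelSetter
// below also sizes the VBV buffer as bitrate * VBV_DELAY, i.e. roughly five seconds'
// worth of bits at the configured bitrate.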
constexpr float VBV_DELAY = 5.0f;

}  // namespace

class C2SoftMpeg4Enc::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
        : SimpleInterface<void>::BaseParams(
                helper,
                COMPONENT_NAME,
                C2Component::KIND_ENCODER,
                C2Component::DOMAIN_VIDEO,
                MEDIA_MIMETYPE_VIDEO) {
        noPrivateBuffers(); // TODO: account for our buffers here
        noInputReferences();
        noOutputReferences();
        noInputLatency();
        noTimeStretch();
        setDerivedInstance(this);

        addParameter(
                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                .withConstValue(new C2ComponentAttributesSetting(
                    C2Component::ATTRIB_IS_TEMPORAL))
                .build());

        addParameter(
                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
                .withConstValue(new C2StreamUsageTuning::input(
                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
                .build());

        addParameter(
                DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                .withDefault(new C2StreamPictureSizeInfo::input(0u, 176, 144))
                .withFields({
#ifdef MPEG4
                    C2F(mSize, width).inRange(16, 176, 16),
                    C2F(mSize, height).inRange(16, 144, 16),
#else
                    C2F(mSize, width).oneOf({176, 352}),
                    C2F(mSize, height).oneOf({144, 288}),
#endif
                })
                .withSetter(SizeSetter)
                .build());

        addParameter(
                DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
                .withDefault(new C2StreamFrameRateInfo::output(0u, 17.))
                // TODO: More restriction?
                .withFields({C2F(mFrameRate, value).greaterThan(0.)})
                .withSetter(
                    Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
                .build());

        addParameter(
                DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
                .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
                .withSetter(BitrateSetter)
                .build());

        addParameter(
                DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
                .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
                .withFields({C2F(mSyncFramePeriod, value).any()})
                .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
                .build());

#ifdef MPEG4
        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                .withDefault(new C2StreamProfileLevelInfo::output(
                        0u, PROFILE_MP4V_SIMPLE, LEVEL_MP4V_2))
                .withFields({
                    C2F(mProfileLevel, profile).equalTo(
                        PROFILE_MP4V_SIMPLE),
                    C2F(mProfileLevel, level).oneOf({
                        C2Config::LEVEL_MP4V_0,
                        C2Config::LEVEL_MP4V_0B,
                        C2Config::LEVEL_MP4V_1,
                        C2Config::LEVEL_MP4V_2})
                })
                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                .build());
#else
        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                .withDefault(new C2StreamProfileLevelInfo::output(
                        0u, PROFILE_H263_BASELINE, LEVEL_H263_45))
                .withFields({
                    C2F(mProfileLevel, profile).equalTo(
                        PROFILE_H263_BASELINE),
                    C2F(mProfileLevel, level).oneOf({
                        C2Config::LEVEL_H263_10,
                        C2Config::LEVEL_H263_20,
                        C2Config::LEVEL_H263_30,
                        C2Config::LEVEL_H263_40,
                        C2Config::LEVEL_H263_45})
                })
                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                .build());
#endif
    }

    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
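        // Clamp to the 4096 bps floor declared in the bitrate field range above.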
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (me.v.value <= 4096) {
            me.set().value = 4096;
        }
        return res;
    }

    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                          C2P<C2StreamPictureSizeInfo::input> &me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
            me.set().width = oldMe.v.width;
        }
        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
            me.set().height = oldMe.v.height;
        }
        return res;
    }

    static C2R ProfileLevelSetter(
            bool mayBlock,
            C2P<C2StreamProfileLevelInfo::output> &me,
            const C2P<C2StreamPictureSizeInfo::input> &size,
            const C2P<C2StreamFrameRateInfo::output> &frameRate,
            const C2P<C2StreamBitrateInfo::output> &bitrate) {
        (void)mayBlock;
        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
#ifdef MPEG4
            me.set().profile = PROFILE_MP4V_SIMPLE;
#else
            me.set().profile = PROFILE_H263_BASELINE;
#endif
        }

        struct LevelLimits {
            C2Config::level_t level;
            uint32_t sampleRate;
            uint32_t width;
            uint32_t height;
            uint32_t frameRate;
            uint32_t bitrate;
            uint32_t vbvSize;
        };

        constexpr LevelLimits kLimits[] = {
#ifdef MPEG4
            { LEVEL_MP4V_0, 380160, 176, 144, 15, 64000, 163840 },
            // { LEVEL_MP4V_0B, 380160, 176, 144, 15, 128000, 163840 },
            { LEVEL_MP4V_1, 380160, 176, 144, 30, 64000, 163840 },
            { LEVEL_MP4V_2, 1520640, 352, 288, 30, 128000, 655360 },
#else
            // HRD buffer size = B + BPPmaxKb * 1024 bits,
            // where BPPmaxKb * 1024 is the maximum number of bits per picture
            // that has been negotiated for use in the bitstream (Sec 3.6 of
            // T-Rec-H.263) and B = 4 * Rmax / PCF, with Rmax the maximum bit
            // rate and PCF the picture clock frequency.
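            // e.g. for Level 10 (QCIF @ 64 kbps, PCF = 30000/1001, BPPmaxKb = 64):
            // B = 4 * 64000 / (30000/1001) ~= 8541.9, so 8541.9 + 64 * 1024 ~= 74077
            // bits, which is the vbvSize listed below.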
            { LEVEL_H263_10, 380160, 176, 144, 15, 64000, 74077 },
            { LEVEL_H263_45, 380160, 176, 144, 15, 128000, 82619 },
            { LEVEL_H263_20, 1520640, 352, 288, 30, 128000, 279227 },
            { LEVEL_H263_30, 3041280, 352, 288, 30, 384000, 313395 },
            { LEVEL_H263_40, 3041280, 352, 288, 30, 2048000, 535483 },
            // { LEVEL_H263_50, 5068800, 352, 288, 60, 4096000, 808823 },
#endif
        };

        auto mbs = ((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
        auto sampleRate = mbs * frameRate.v.value * 16 * 16;
        auto vbvSize = bitrate.v.value * VBV_DELAY;
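        // e.g. a 176x144 picture at 15 fps gives 11 * 9 = 99 macroblocks and
        // sampleRate = 99 * 15 * 256 = 380160 luma samples/s, the Level 0 /
        // Level 10 limit in the table above.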

        // Check if the supplied level meets the MB / bitrate requirements. If
        // not, update the level with the lowest level meeting the requirements.
        bool found = false;

        // By default needsUpdate = false in case the supplied level does meet
        // the requirements.
        bool needsUpdate = false;
#ifdef MPEG4
        // For Level 0b, we want to update the level anyway, as the library does
        // not seem to accept this value.
        if (me.v.level == LEVEL_MP4V_0B) {
            needsUpdate = true;
        }
#endif
        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
            needsUpdate = true;
        }
        for (const LevelLimits &limit : kLimits) {
            if (sampleRate <= limit.sampleRate && size.v.width <= limit.width &&
                    vbvSize <= limit.vbvSize && size.v.height <= limit.height &&
                    bitrate.v.value <= limit.bitrate && frameRate.v.value <= limit.frameRate) {
                // This is the lowest level that meets the requirements, and if
                // we haven't seen the supplied level yet, that means we don't
                // need the update.
                if (needsUpdate) {
                    ALOGD("Given level %x does not cover current configuration: "
                          "adjusting to %x", me.v.level, limit.level);
                    me.set().level = limit.level;
                }
                found = true;
                break;
            }
            if (me.v.level == limit.level) {
                // We break out of the loop when the lowest feasible level is
                // found. The fact that we're here means that our level doesn't
                // meet the requirement and needs to be updated.
                needsUpdate = true;
            }
        }
        // If not found or exceeds max level, set to the highest supported level.
#ifdef MPEG4
        if (!found || me.v.level > LEVEL_MP4V_2) {
            me.set().level = LEVEL_MP4V_2;
        }
#else
        if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
            me.set().level = LEVEL_H263_40;
        }
#endif
        return C2R::Ok();
    }

    // unsafe getters
    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
    uint32_t getSyncFramePeriod() const {
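        // Convert the sync-frame interval (microseconds) into a period in frames;
        // e.g. the default 1,000,000 us at the default 17 fps yields a period of
        // 17 frames between sync frames.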
        if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
            return 0;
        }
        double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
        return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
    }

  private:
    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
    std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
};

C2SoftMpeg4Enc::C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
                               const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mHandle(nullptr),
      mEncParams(nullptr),
      mStarted(false),
      mOutBufferSize(524288) {
}

C2SoftMpeg4Enc::~C2SoftMpeg4Enc() {
    onRelease();
}

c2_status_t C2SoftMpeg4Enc::onInit() {
#ifdef MPEG4
    mEncodeMode = COMBINE_MODE_WITH_ERR_RES;
#else
    mEncodeMode = H263_MODE;
#endif
    if (!mHandle) {
        mHandle = new tagvideoEncControls;
    }

    if (!mEncParams) {
        mEncParams = new tagvideoEncOptions;
    }

    if (!(mEncParams && mHandle)) return C2_NO_MEMORY;

    mSignalledOutputEos = false;
    mSignalledError = false;

    return initEncoder();
}

c2_status_t C2SoftMpeg4Enc::onStop() {
    if (!mStarted) {
        return C2_OK;
    }
    if (mHandle) {
        (void)PVCleanUpVideoEncoder(mHandle);
    }
    mStarted = false;
    mSignalledOutputEos = false;
    mSignalledError = false;
    return C2_OK;
}

void C2SoftMpeg4Enc::onReset() {
    onStop();
    initEncoder();
}

void C2SoftMpeg4Enc::onRelease() {
    onStop();
    if (mEncParams) {
        delete mEncParams;
        mEncParams = nullptr;
    }
    if (mHandle) {
        delete mHandle;
        mHandle = nullptr;
    }
}

c2_status_t C2SoftMpeg4Enc::onFlush_sm() {
    return C2_OK;
}

static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        flags |= C2FrameData::FLAG_END_OF_STREAM;
        ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
}

c2_status_t C2SoftMpeg4Enc::initEncParams() {
    if (mHandle) {
        memset(mHandle, 0, sizeof(tagvideoEncControls));
    } else return C2_CORRUPTED;
    if (mEncParams) {
        memset(mEncParams, 0, sizeof(tagvideoEncOptions));
    } else return C2_CORRUPTED;

    if (!PVGetDefaultEncOption(mEncParams, 0)) {
        ALOGE("Failed to get default encoding parameters");
        return C2_CORRUPTED;
    }

    if (mFrameRate->value == 0) {
        ALOGE("Framerate should not be 0");
        return C2_BAD_VALUE;
    }

    mEncParams->encMode = mEncodeMode;
    mEncParams->encWidth[0] = mSize->width;
    mEncParams->encHeight[0] = mSize->height;
    mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
    mEncParams->rcType = VBR_1;
    mEncParams->vbvDelay = VBV_DELAY;
    mEncParams->profile_level = CORE_PROFILE_LEVEL2;
    mEncParams->packetSize = 32;
    mEncParams->rvlcEnable = PV_OFF;
    mEncParams->numLayers = 1;
    mEncParams->timeIncRes = 1000;
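    // timeIncRes is the number of time-base ticks per second, so tickPerSrc below
    // is the (integer) number of ticks per source frame at the configured rate.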
    mEncParams->tickPerSrc = mEncParams->timeIncRes / (mFrameRate->value + 0.5);
    mEncParams->bitRate[0] = mBitrate->value;
    mEncParams->iQuant[0] = 15;
    mEncParams->pQuant[0] = 12;
    mEncParams->quantType[0] = 0;
    mEncParams->noFrameSkipped = PV_OFF;

    // PV's MPEG4 encoder requires the video dimensions to be a multiple of 16.
    if (mSize->width % 16 != 0 || mSize->height % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
              mSize->width, mSize->height);
        return C2_BAD_VALUE;
    }

    // Set IDR frame refresh interval
    mEncParams->intraPeriod = mIntf->getSyncFramePeriod();
    mEncParams->numIntraMB = 0;
    mEncParams->sceneDetect = PV_ON;
    mEncParams->searchRange = 16;
    mEncParams->mv8x8Enable = PV_OFF;
    mEncParams->gobHeaderInterval = 0;
    mEncParams->useACPred = PV_ON;
    mEncParams->intraDCVlcTh = 0;

    return C2_OK;
}

c2_status_t C2SoftMpeg4Enc::initEncoder() {
    if (mStarted) {
        return C2_OK;
    }
    {
        IntfImpl::Lock lock = mIntf->lock();
        mSize = mIntf->getSize_l();
        mBitrate = mIntf->getBitrate_l();
        mFrameRate = mIntf->getFrameRate_l();
    }
    c2_status_t err = initEncParams();
    if (C2_OK != err) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        return err;
    }
    if (!PVInitVideoEncoder(mHandle, mEncParams)) {
        ALOGE("Failed to initialize the encoder");
        mSignalledError = true;
        return C2_CORRUPTED;
    }

    // 1st buffer for codec specific data
    mNumInputFrames = -1;
    mStarted = true;
    return C2_OK;
}

void C2SoftMpeg4Enc::process(
        const std::unique_ptr<C2Work> &work,
        const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 1u;
    work->worklets.front()->output.flags = work->input.flags;
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    // Initialize encoder if not already initialized
    if (!mStarted && C2_OK != initEncoder()) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    std::shared_ptr<C2LinearBlock> block;
    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
    c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
    if (err != C2_OK) {
        ALOGE("fetchLinearBlock for Output failed with status %d", err);
        work->result = C2_NO_MEMORY;
        return;
    }

    C2WriteView wView = block->map().get();
    if (wView.error()) {
        ALOGE("write view map failed %d", wView.error());
        work->result = wView.error();
        return;
    }

    uint8_t *outPtr = (uint8_t *)wView.data();
    if (mNumInputFrames < 0) {
        // The very first thing we want to output is the codec specific data.
        int32_t outputSize = mOutBufferSize;
        if (!PVGetVolHeader(mHandle, outPtr, &outputSize, 0)) {
            ALOGE("Failed to get VOL header");
            mSignalledError = true;
            work->result = C2_CORRUPTED;
            return;
        } else {
            ALOGV("Bytes Generated in header %d\n", outputSize);
        }

        ++mNumInputFrames;
        if (outputSize) {
            std::unique_ptr<C2StreamInitDataInfo::output> csd =
                C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
            if (!csd) {
                ALOGE("CSD allocation failed");
                mSignalledError = true;
                work->result = C2_NO_MEMORY;
                return;
            }
            memcpy(csd->m.value, outPtr, outputSize);
            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
        }
    }

    // handle dynamic bitrate change
    {
        IntfImpl::Lock lock = mIntf->lock();
        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
        lock.unlock();

        if (bitrate != mBitrate) {
            mBitrate = bitrate;
            int layerBitrate[2] = {static_cast<int>(mBitrate->value), 0};
            ALOGV("Calling PVUpdateBitRate %d", layerBitrate[0]);
            PVUpdateBitRate(mHandle, layerBitrate);
        }
    }

    std::shared_ptr<C2GraphicView> rView;
    std::shared_ptr<C2Buffer> inputBuffer;
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        rView = std::make_shared<C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (rView->error() != C2_OK) {
            ALOGE("graphic view map err = %d", rView->error());
            work->result = rView->error();
            return;
        }
        //(b/232396154)
        //workaround for incorrect crop size in view when using surface mode
        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
    } else {
        fillEmptyWork(work);
        if (eos) {
            mSignalledOutputEos = true;
            ALOGV("signalled EOS");
        }
        return;
    }

    uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
    const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front();
    if (inBuffer.width() < mSize->width ||
        inBuffer.height() < mSize->height) {
        /* Expect width height to be configured */
        ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", inBuffer.width(),
              mSize->width, inBuffer.height(), mSize->height);
        work->result = C2_BAD_VALUE;
        return;
    }

    const C2PlanarLayout &layout = rView->layout();
    uint8_t *yPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_Y]);
    uint8_t *uPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_U]);
    uint8_t *vPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_V]);
    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
    uint32_t width = mSize->width;
    uint32_t height = mSize->height;
    // width and height are always even (as block size is 16x16)
    CHECK_EQ((width & 1u), 0u);
    CHECK_EQ((height & 1u), 0u);
    size_t yPlaneSize = width * height;
    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
            [[fallthrough]];
        case C2PlanarLayout::TYPE_RGBA: {
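            // Interleaved RGB(A) input is converted into a planar YUV420 scratch
            // buffer: a full-size Y plane plus quarter-size U and V planes, hence
            // yPlaneSize * 3 / 2 bytes in total.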
            MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
            mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
            yPlane = conversionBuffer.data();
            uPlane = yPlane + yPlaneSize;
            vPlane = uPlane + yPlaneSize / 4;
            yStride = width;
            uStride = vStride = width / 2;
            ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *rView.get());
            break;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (!IsYUV420(*rView)) {
                ALOGE("input is not YUV420");
                work->result = C2_BAD_VALUE;
                break;
            }

            if (layout.planes[layout.PLANE_Y].colInc == 1
                    && layout.planes[layout.PLANE_U].colInc == 1
                    && layout.planes[layout.PLANE_V].colInc == 1
                    && yStride == align(width, 16)
                    && uStride == vStride
                    && yStride == 2 * vStride) {
                // I420 compatible with yStride being equal to aligned width
                // planes are already set up above
                break;
            }

            // copy to I420
            MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
            mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
            MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, width, height);
            status_t err = ImageCopy(conversionBuffer.data(), &img, *rView);
            if (err != OK) {
                ALOGE("Buffer conversion failed: %d", err);
                work->result = C2_BAD_VALUE;
                return;
            }
            yPlane = conversionBuffer.data();
            uPlane = yPlane + yPlaneSize;
            vPlane = uPlane + yPlaneSize / 4;
            yStride = width;
            uStride = vStride = width / 2;
            break;
        }

        case C2PlanarLayout::TYPE_YUVA:
            ALOGE("YUVA plane type is not supported");
            work->result = C2_BAD_VALUE;
            return;

        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            work->result = C2_BAD_VALUE;
            return;
    }

    CHECK(NULL != yPlane);
    /* Encode frames */
    VideoEncFrameIO vin, vout;
    memset(&vin, 0, sizeof(vin));
    memset(&vout, 0, sizeof(vout));
    vin.yChan = yPlane;
    vin.uChan = uPlane;
    vin.vChan = vPlane;
    vin.timestamp = (inputTimeStamp + 500) / 1000;  // in ms
    vin.height = align(height, 16);
    vin.pitch = align(width, 16);

    uint32_t modTimeMs = 0;
    int32_t nLayer = 0;
    MP4HintTrack hintTrack;
    int32_t outputSize = mOutBufferSize;
    if (!PVEncodeVideoFrame(mHandle, &vin, &vout, &modTimeMs, outPtr, &outputSize, &nLayer) ||
        !PVGetHintTrack(mHandle, &hintTrack)) {
        ALOGE("Failed to encode frame or get hint track at frame %" PRId64, mNumInputFrames);
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }
    ALOGV("outputSize filled : %d", outputSize);
    ++mNumInputFrames;
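    // PVGetOverrunBuffer() returns a non-null pointer when the encoder had to spill
    // past the supplied output buffer; assert that mOutBufferSize was large enough.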
    CHECK(NULL == PVGetOverrunBuffer(mHandle));

    fillEmptyWork(work);
    if (outputSize) {
        std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outputSize);
        work->worklets.front()->output.ordinal.timestamp = inputTimeStamp;
        if (hintTrack.CodeType == 0) {
            buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                    0u /* stream id */, C2Config::SYNC_FRAME));
        }
        work->worklets.front()->output.buffers.push_back(buffer);
    }
    if (eos) {
        mSignalledOutputEos = true;
    }

    mConversionBuffersInUse.erase(yPlane);
}

c2_status_t C2SoftMpeg4Enc::drain(
        uint32_t drainMode,
        const std::shared_ptr<C2BlockPool> &pool) {
    (void)pool;
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    return C2_OK;
}

class C2SoftMpeg4EncFactory : public C2ComponentFactory {
public:
    C2SoftMpeg4EncFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
              GetCodec2PlatformComponentStore()->getParamReflector())) {}

    virtual c2_status_t createComponent(
            c2_node_id_t id,
            std::shared_ptr<C2Component>* const component,
            std::function<void(C2Component*)> deleter) override {
        *component = std::shared_ptr<C2Component>(
            new C2SoftMpeg4Enc(
                COMPONENT_NAME, id,
                std::make_shared<C2SoftMpeg4Enc::IntfImpl>(mHelper)),
            deleter);
        return C2_OK;
    }

    virtual c2_status_t createInterface(
            c2_node_id_t id,
            std::shared_ptr<C2ComponentInterface>* const interface,
            std::function<void(C2ComponentInterface*)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
            new SimpleInterface<C2SoftMpeg4Enc::IntfImpl>(
                COMPONENT_NAME, id,
                std::make_shared<C2SoftMpeg4Enc::IntfImpl>(mHelper)),
            deleter);
        return C2_OK;
    }

    virtual ~C2SoftMpeg4EncFactory() override = default;

private:
    std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    ALOGV("in %s", __func__);
    return new ::android::C2SoftMpeg4EncFactory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
    ALOGV("in %s", __func__);
    delete factory;
}