/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftGav1Dec"
#include "C2SoftGav1Dec.h"

#include <android-base/properties.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <libyuv.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
#if LIBYUV_VERSION >= 1780
#include <algorithm>
#define HAVE_LIBYUV_I410_I210_TO_AB30 1
#else
#define HAVE_LIBYUV_I410_I210_TO_AB30 0
#endif

namespace android {

// Property used to control the number of threads used in the gav1 decoder.
constexpr char kNumThreadsProperty[] = "debug.c2.gav1.numthreads";
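// For example, "adb shell setprop debug.c2.gav1.numthreads 2" caps decoding at two threads;
// the property can only lower the default, which is the online CPU core count.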

// CODECNAME is set and passed in as a compile flag from Android.bp.
constexpr char COMPONENT_NAME[] = CODECNAME;

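// Lower bound for the advertised maximum input buffer size; see MaxInputSizeSetter below.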
constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
      : SimpleInterface<void>::BaseParams(
            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
    noPrivateBuffers();  // TODO: account for our buffers here.
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();

    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                     .withConstValue(new C2ComponentAttributesSetting(
                         C2Component::ATTRIB_IS_TEMPORAL))
                     .build());

    addParameter(
        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 4096),
                C2F(mSize, height).inRange(2, 4096),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                     .withDefault(new C2StreamProfileLevelInfo::input(
                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                     .withFields({C2F(mProfileLevel, profile)
                                      .oneOf({C2Config::PROFILE_AV1_0,
                                              C2Config::PROFILE_AV1_1}),
                                  C2F(mProfileLevel, level)
                                      .oneOf({
                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                      })})
                     .withSetter(ProfileLevelSetter, mSize)
                     .build());

    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoInput)
            .withFields({
                C2F(mHdr10PlusInfoInput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoInputSetter)
            .build());

    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoOutput)
            .withFields({
                C2F(mHdr10PlusInfoOutput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoOutputSetter)
            .build());

    // default static info
    C2HdrStaticMetadataStruct defaultStaticInfo{};
    helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
    addParameter(
        DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
            .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
            .withFields({
                C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
            })
            .withSetter(HdrStaticInfoSetter)
            .build());

    addParameter(
        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(MaxPictureSizeSetter, mSize)
            .build());

    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                     .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                     .withFields({
                         C2F(mMaxInputSize, value).any(),
                     })
                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
                     .build());

    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                               C2Color::YUV_420);
    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
        C2Color::YUV_420);
    helper->addStructDescriptors<C2ChromaOffsetStruct>();

    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                     .withConstValue(defaultColorInfo)
                     .build());

    addParameter(
        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsTuning::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields(
                {C2F(mDefaultColorAspects, range)
                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                 C2F(mDefaultColorAspects, primaries)
                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                              C2Color::PRIMARIES_OTHER),
                 C2F(mDefaultColorAspects, transfer)
                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                              C2Color::TRANSFER_OTHER),
                 C2F(mDefaultColorAspects, matrix)
                     .inRange(C2Color::MATRIX_UNSPECIFIED,
                              C2Color::MATRIX_OTHER)})
            .withSetter(DefaultColorAspectsSetter)
            .build());

      addParameter(
              DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
              .withDefault(new C2StreamColorAspectsInfo::input(
                      0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                      C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
              .withFields({
                  C2F(mCodedColorAspects, range).inRange(
                              C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
                  C2F(mCodedColorAspects, primaries).inRange(
                              C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                  C2F(mCodedColorAspects, transfer).inRange(
                              C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
                  C2F(mCodedColorAspects, matrix).inRange(
                              C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
              })
              .withSetter(CodedColorAspectsSetter)
              .build());

      addParameter(
              DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
              .withDefault(new C2StreamColorAspectsInfo::output(
                      0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                      C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
              .withFields({
                  C2F(mColorAspects, range).inRange(
                              C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
                  C2F(mColorAspects, primaries).inRange(
                              C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                  C2F(mColorAspects, transfer).inRange(
                              C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
                  C2F(mColorAspects, matrix).inRange(
                              C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
              })
              .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
              .build());

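    // Output pixel formats offered to clients: 8-bit 4:2:0 is always available; P010 is added
    // only when the HAL reports support for it.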
    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
        pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
    }
    // If the surface color format isn't added to the supported formats, there is no way to
    // know when the color format is configured to surface. This is necessary to be able to
    // choose a 10-bit format while decoding 10-bit clips in surface mode.
    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

    // TODO: support more formats?
    addParameter(
            DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
            .withDefault(new C2StreamPixelFormatInfo::output(
                              0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
            .build());
  }

  static C2R SizeSetter(bool mayBlock,
                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                        C2P<C2StreamPictureSizeInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
      me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
      me.set().height = oldMe.v.height;
    }
    return res;
  }

  static C2R MaxPictureSizeSetter(
      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    // TODO: get max width/height from the size's field helpers vs.
    // hardcoding
    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
    return C2R::Ok();
  }

  static C2R MaxInputSizeSetter(
      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
    (void)mayBlock;
    // assume compression ratio of 2, but enforce a floor
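    // (3072 bytes per 64x64 block is half of the 6144 bytes a raw 8-bit 4:2:0 superblock occupies.)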
    me.set().value = c2_max((((maxSize.v.width + 63) / 64)
                * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
    return C2R::Ok();
  }

  static C2R DefaultColorAspectsSetter(
      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                                const C2P<C2StreamColorAspectsTuning::output> &def,
                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
    (void)mayBlock;
    // take default values for all unspecified fields, and coded values for specified ones
    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
        ? def.v.primaries : coded.v.primaries;
    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
        ? def.v.transfer : coded.v.transfer;
    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
  }

  static C2R ProfileLevelSetter(
      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    (void)size;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  std::shared_ptr<C2StreamColorAspectsTuning::output>
  getDefaultColorAspects_l() {
    return mDefaultColorAspects;
  }

  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
      return mColorAspects;
  }

  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  // unsafe getters
  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }

  static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
    (void)mayBlock;
    if (me.v.mastering.red.x > 1) {
      me.set().mastering.red.x = 1;
    }
    if (me.v.mastering.red.y > 1) {
      me.set().mastering.red.y = 1;
    }
    if (me.v.mastering.green.x > 1) {
      me.set().mastering.green.x = 1;
    }
    if (me.v.mastering.green.y > 1) {
      me.set().mastering.green.y = 1;
    }
    if (me.v.mastering.blue.x > 1) {
      me.set().mastering.blue.x = 1;
    }
    if (me.v.mastering.blue.y > 1) {
      me.set().mastering.blue.y = 1;
    }
    if (me.v.mastering.white.x > 1) {
      me.set().mastering.white.x = 1;
    }
    if (me.v.mastering.white.y > 1) {
      me.set().mastering.white.y = 1;
    }
    if (me.v.mastering.maxLuminance > 65535.0) {
      me.set().mastering.maxLuminance = 65535.0;
    }
    if (me.v.mastering.minLuminance > 6.5535) {
      me.set().mastering.minLuminance = 6.5535;
    }
    if (me.v.maxCll > 65535.0) {
      me.set().maxCll = 65535.0;
    }
    if (me.v.maxFall > 65535.0) {
      me.set().maxFall = 65535.0;
    }
    return C2R::Ok();
  }

 private:
  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
  std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
};

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  mTimeStart = mTimeEnd = systemTime();
}

C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }

c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftGav1Dec::onStop() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  return C2_OK;
}

void C2SoftGav1Dec::onReset() {
  (void)onStop();
  c2_status_t err = onFlush_sm();
  if (err != C2_OK) {
    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
    destroyDecoder();
    if (!initDecoder()) {
      ALOGE("Hard reset failed.");
    }
  }
}

void C2SoftGav1Dec::onRelease() { destroyDecoder(); }

c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}

static int GetCPUCoreCount() {
  int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
  // _SC_NPROC_ONLN must be defined...
  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
  CHECK(cpuCoreCount >= 1);
  ALOGV("Number of CPU cores: %d", cpuCoreCount);
  return cpuCoreCount;
}

bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
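  // Start from the 8-bit YV12 default; outputBuffer() switches the output format per frame
  // when the stream and the configured pixel format call for it.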
  {
      IntfImpl::Lock lock = mIntf->lock();
      mPixelFormatInfo = mIntf->getPixelFormat_l();
  }
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();
  int32_t numThreads = android::base::GetIntProperty(kNumThreadsProperty, 0);
  if (numThreads > 0 && numThreads < settings.threads) {
    settings.threads = numThreads;
  }

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}

void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }

void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
  uint32_t flags = 0;
  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
    flags |= C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("signalling eos");
  }
  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
  work->worklets.front()->output.buffers.clear();
  work->worklets.front()->output.ordinal = work->input.ordinal;
  work->workletsProcessed = 1u;
}

void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
      IntfImpl::Lock lock = mIntf->lock();
      buffer->setInfo(mIntf->getColorAspects_l());
  }
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}

void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);

    mTimeStart = systemTime();
    nsecs_t delay = mTimeStart - mTimeEnd;

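    // The frame index is forwarded as libgav1's user_private_data so that finishWork() can
    // match the decoded frame back to the C2Work it came from when it is dequeued later.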
    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    mTimeEnd = systemTime();
    nsecs_t decodeTime = mTimeEnd - mTimeStart;
    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }

  }

  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    fillEmptyWork(work);
  }
}

void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
                                       const std::unique_ptr<C2Work> &work) {
  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
  bool infoPresent = false;
  if (buffer->has_hdr_mdcv) {
    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.red.x = buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
    hdrStaticMetadataInfo.mastering.red.y = buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;

    hdrStaticMetadataInfo.mastering.green.x = buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
    hdrStaticMetadataInfo.mastering.green.y = buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;

    hdrStaticMetadataInfo.mastering.blue.x = buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
    hdrStaticMetadataInfo.mastering.blue.y = buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;

    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.white.x = buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
    hdrStaticMetadataInfo.mastering.white.y = buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;

    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
    // hdr_mdcv.luminance_min is in 18.14 format.
    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
    infoPresent = true;
  }

  if (buffer->has_hdr_cll) {
    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
    infoPresent = true;
  }
  // config if static info has changed
  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
    work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(mHdrStaticMetadataInfo));
  }
}

void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
                                         const std::unique_ptr<C2Work> &work) {
  if (buffer->has_itut_t35) {
    std::vector<uint8_t> payload;
    size_t payloadSize = buffer->itut_t35.payload_size;
    if (payloadSize > 0) {
      payload.push_back(buffer->itut_t35.country_code);
      if (buffer->itut_t35.country_code == 0xFF) {
        payload.push_back(buffer->itut_t35.country_code_extension_byte);
      }
      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
    }

    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
            C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
    if (!hdr10PlusInfo) {
      ALOGE("Hdr10PlusInfo allocation failed");
      mSignalledError = true;
      work->result = C2_NO_MEMORY;
      return;
    }
    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

    // config if hdr10Plus info has changed
    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
      mHdr10PlusInfo = std::move(hdr10PlusInfo);
      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
    }
  }
}

void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
    VuiColorAspects vuiColorAspects;
    vuiColorAspects.primaries = buffer->color_primary;
    vuiColorAspects.transfer = buffer->transfer_characteristics;
    vuiColorAspects.coeffs = buffer->matrix_coefficients;
    vuiColorAspects.fullRange = buffer->color_range;

    // convert vui aspects to C2 values if changed
    if (!(vuiColorAspects == mBitstreamColorAspects)) {
        mBitstreamColorAspects = vuiColorAspects;
        ColorAspects sfAspects;
        C2StreamColorAspectsInfo::input codedAspects = { 0u };
        ColorUtils::convertIsoColorAspectsToCodecAspects(
                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
                vuiColorAspects.fullRange, sfAspects);
        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
        }
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
    }
}

void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
    mSignalledError = true;
    work->result = error;
    work->workletsProcessed = 1u;
}

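// Grow-only scratch buffer used by the pixel format conversions below; existing contents are
// discarded whenever a larger buffer has to be allocated.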
bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
    if (size > mTmpFrameBufferSize) {
        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
        if (mTmpFrameBuffer == nullptr) {
            mTmpFrameBufferSize = 0;
            return false;
        }
        mTmpFrameBufferSize = size;
    }
    return true;
}

bool C2SoftGav1Dec::fillMonochromeRow(int value) {
    const size_t tmpSize = mWidth;
    const bool needFill = tmpSize > mTmpFrameBufferSize;
    if (!allocTmpFrameBuffer(tmpSize)) {
        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
        return false;
    }
    if (needFill) {
        std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
    }
    return true;
}

bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                 const std::unique_ptr<C2Work> &work) {
  if (!(work && pool)) return false;

  const libgav1::DecoderBuffer *buffer;
  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);

  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
    return false;
  }

  // |buffer| can be NULL even when status is kLibgav1StatusOk or
  // kLibgav1StatusNothingToDequeue. This is not an error. It means one of two things:
  //  - The EnqueueFrame() call was a flush (called with nullptr), or
  //  - the enqueued frame did not produce any displayable frames.
  if (!buffer) {
    return false;
  }

#if LIBYUV_VERSION < 1871
  if (buffer->bitdepth > 10) {
    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const int width = buffer->displayed_width[0];
  const int height = buffer->displayed_height[0];
  if (width != mWidth || height != mHeight) {
    mWidth = width;
    mHeight = height;

    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(size));
    } else {
      ALOGE("Config update size failed");
      mSignalledError = true;
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  getVuiParams(buffer);
  getHDRStaticParams(buffer, work);
  getHDR10PlusInfoData(buffer, work);

#if LIBYUV_VERSION < 1779
  if (buffer->bitdepth == 10 &&
      !(buffer->image_format == libgav1::kImageFormatYuv420 ||
        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
    ALOGE("image_format %d not supported for 10bit", buffer->image_format);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const bool isMonochrome =
      buffer->image_format == libgav1::kImageFormatMonochrome400;

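  // Pick the output format: 8-bit YV12 by default; for 10/12-bit content, when the client did
  // not force YCBCR_420_888, prefer P010 or RGBA_1010102 depending on the HDR color aspects
  // and on what the conversion routines below can actually produce.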
  std::shared_ptr<C2GraphicBlock> block;
  uint32_t format = HAL_PIXEL_FORMAT_YV12;
  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
    IntfImpl::Lock lock = mIntf->lock();
    codedColorAspects = mIntf->getColorAspects_l();
    bool allowRGBA1010102 = false;
    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
      allowRGBA1010102 = true;
    }
    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
#if !HAVE_LIBYUV_I410_I210_TO_AB30
    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
        (buffer->image_format != libgav1::kImageFormatYuv420) &&
        (buffer->bitdepth == 10)) {
      ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
      mSignalledError = true;
      work->result = C2_OMITTED;
      work->workletsProcessed = 1u;
      return false;
    }
#endif
  }
  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
      (buffer->image_format == libgav1::kImageFormatYuv422 ||
       buffer->image_format == libgav1::kImageFormatYuv444)) {
      // There are no 12-bit color conversion functions from YUV422/YUV444 to
      // RGBA_1010102. Use 8-bit YV12 in this case.
      format = HAL_PIXEL_FORMAT_YV12;
  }
  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
      // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
      // in this case.
      format = HAL_PIXEL_FORMAT_YV12;
  }

  if (mHalPixelFormat != format) {
    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(pixelFormat));
    } else {
      ALOGE("Config update pixelFormat failed");
      mSignalledError = true;
      work->workletsProcessed = 1u;
      work->result = C2_CORRUPTED;
      return false;
    }
    mHalPixelFormat = format;
  }

  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

  // We always create a graphic block that is width aligned to 16 and height
  // aligned to 2. We set the correct "crop" value of the image in the call to
  // createGraphicBuffer() by setting the correct image dimensions.
  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
                                            align(mHeight, 2), format, usage,
                                            &block);

  if (err != C2_OK) {
    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
    work->result = err;
    return false;
  }

  C2GraphicView wView = block->map().get();

  if (wView.error()) {
    ALOGE("graphic view map failed %d", wView.error());
    work->result = C2_CORRUPTED;
    return false;
  }

  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
        block->height(), mWidth, mHeight, (int)buffer->user_private_data);

  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);

  C2PlanarLayout layout = wView.layout();
  size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

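  // Copy/convert the decoded planes into the destination block. The path depends on the coded
  // bit depth (12, 10, or 8 bits) and on the chroma layout (4:2:0, 4:2:2, 4:4:4 or monochrome).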
  if (buffer->bitdepth == 12) {
#if LIBYUV_VERSION >= 1871
      const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
      const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
      const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
      size_t srcYStride = buffer->stride[0] / 2;
      size_t srcUStride = buffer->stride[1] / 2;
      size_t srcVStride = buffer->stride[2] / 2;
      if (isMonochrome) {
          if (!fillMonochromeRow(2048)) {
              setError(work, C2_NO_MEMORY);
              return false;
          }
          srcU = srcV = mTmpFrameBuffer.get();
          srcUStride = srcVStride = 0;
      }
      if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
          libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                   dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                   mWidth, mHeight);
      } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
          libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                             mWidth, mHeight);
      } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
          libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                             mWidth, mHeight);
      } else {
          libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                             mWidth, mHeight);
      }
#endif  // LIBYUV_VERSION >= 1871
  } else if (buffer->bitdepth == 10) {
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0] / 2;
    size_t srcUStride = buffer->stride[1] / 2;
    size_t srcVStride = buffer->stride[2] / 2;

    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
        bool processed = false;
#if HAVE_LIBYUV_I410_I210_TO_AB30
        if (buffer->image_format == libgav1::kImageFormatYuv444) {
            libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                     dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                     mWidth, mHeight);
            processed = true;
        } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
            libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                     dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                     mWidth, mHeight);
            processed = true;
        }
#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
        if (!processed) {
            if (isMonochrome) {
                if (!fillMonochromeRow(512)) {
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
                srcU = srcV = mTmpFrameBuffer.get();
                srcUStride = srcVStride = 0;
            }
            convertYUV420Planar16ToY410OrRGBA1010102(
                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
                    srcUStride, srcVStride,
                    dstYStride / sizeof(uint32_t), mWidth, mHeight,
                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
        }
    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
        dstYStride /= 2;
        dstUStride /= 2;
        dstVStride /= 2;
#if LIBYUV_VERSION >= 1779
        if (buffer->image_format == libgav1::kImageFormatYuv444 ||
            buffer->image_format == libgav1::kImageFormatYuv422) {
            // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
            // libyuv::I210ToP010 when they are available.
            // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
            // guarantees.
            const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
            if (!allocTmpFrameBuffer(tmpSize)) {
                ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                setError(work, C2_NO_MEMORY);
                return false;
            }
            uint16_t *const tmpY = mTmpFrameBuffer.get();
            uint16_t *const tmpU = tmpY + dstYStride * mHeight;
            uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
            if (buffer->image_format == libgav1::kImageFormatYuv444) {
                libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                   tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                                   mWidth, mHeight);
            } else {
                libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                   tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                                   mWidth, mHeight);
            }
            libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                               (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
                               mWidth, mHeight);
        } else {
            convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                        srcYStride, srcUStride, srcVStride, dstYStride,
                                        dstUStride, mWidth, mHeight, isMonochrome);
        }
#else  // LIBYUV_VERSION < 1779
        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                    srcYStride, srcUStride, srcVStride, dstYStride,
                                    dstUStride, mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    } else {
#if LIBYUV_VERSION >= 1779
        if (buffer->image_format == libgav1::kImageFormatYuv444) {
            // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
            // it's available.
            const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
            if (!allocTmpFrameBuffer(tmpSize)) {
                ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                setError(work, C2_NO_MEMORY);
                return false;
            }
            uint16_t *const tmpY = mTmpFrameBuffer.get();
            uint16_t *const tmpU = tmpY + dstYStride * mHeight;
            uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
            libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                               tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                               mWidth, mHeight);
            libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                               dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                               mWidth, mHeight);
        } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
            libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                               dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                               mWidth, mHeight);
        } else {
            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                        srcUStride, srcVStride, dstYStride, dstUStride,
                                        mWidth, mHeight, isMonochrome);
        }
#else  // LIBYUV_VERSION < 1779
        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                    srcUStride, srcVStride, dstYStride, dstUStride,
                                    mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    }
  } else {
    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0];
    size_t srcUStride = buffer->stride[1];
    size_t srcVStride = buffer->stride[2];

    if (buffer->image_format == libgav1::kImageFormatYuv444) {
        libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
    } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
    } else {
        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                   srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                   isMonochrome);
    }
  }
  finishWork(buffer->user_private_data, work, std::move(block));
  block = nullptr;
  return true;
}

c2_status_t C2SoftGav1Dec::drainInternal(
    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
    const std::unique_ptr<C2Work> &work) {
  if (drainMode == NO_DRAIN) {
    ALOGW("drain with NO_DRAIN: no-op");
    return C2_OK;
  }
  if (drainMode == DRAIN_CHAIN) {
    ALOGW("DRAIN_CHAIN not supported");
    return C2_OMITTED;
  }

  const Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

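  // Dequeue and output every frame that is still buffered inside the decoder.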
  while (outputBuffer(pool, work)) {
  }

  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
      work->workletsProcessed == 0u) {
    fillEmptyWork(work);
  }

  return C2_OK;
}

c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}

class C2SoftGav1Factory : public C2ComponentFactory {
 public:
  C2SoftGav1Factory()
      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
            GetCodec2PlatformComponentStore()->getParamReflector())) {}

  virtual c2_status_t createComponent(
      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
      std::function<void(C2Component *)> deleter) override {
    *component = std::shared_ptr<C2Component>(
        new C2SoftGav1Dec(COMPONENT_NAME, id,
                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual c2_status_t createInterface(
      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
      std::function<void(C2ComponentInterface *)> deleter) override {
    *interface = std::shared_ptr<C2ComponentInterface>(
        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
            COMPONENT_NAME, id,
            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual ~C2SoftGav1Factory() override = default;

 private:
  std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}