// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//#define LOG_NDEBUG 0
#define LOG_TAG "V4L2DecodeInterface"

#include <v4l2_codec2/components/V4L2DecodeInterface.h>

#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
#include <android/hardware/graphics/common/1.0/types.h>
#include <log/log.h>
#include <media/stagefright/foundation/MediaDefs.h>

#include <v4l2_codec2/common/V4L2ComponentCommon.h>
#include <v4l2_codec2/common/V4L2Device.h>
#include <v4l2_codec2/plugin_store/V4L2AllocatorId.h>

namespace android {
namespace {

constexpr size_t k1080pArea = 1920 * 1088;
constexpr size_t k4KArea = 3840 * 2160;
// Input bitstream buffer size for up to 1080p streams.
constexpr size_t kInputBufferSizeFor1080p = 1024 * 1024;  // 1MB
// Input bitstream buffer size for up to 4k streams.
constexpr size_t kInputBufferSizeFor4K = 4 * kInputBufferSizeFor1080p;

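// Maps a Codec2 component name (including the secure variants) to its video codec; returns
// std::nullopt for unrecognized names.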
std::optional<VideoCodec> getCodecFromComponentName(const std::string& name) {
    if (name == V4L2ComponentName::kH264Decoder || name == V4L2ComponentName::kH264SecureDecoder)
        return VideoCodec::H264;
    if (name == V4L2ComponentName::kVP8Decoder || name == V4L2ComponentName::kVP8SecureDecoder)
        return VideoCodec::VP8;
    if (name == V4L2ComponentName::kVP9Decoder || name == V4L2ComponentName::kVP9SecureDecoder)
        return VideoCodec::VP9;

    ALOGE("Unknown name: %s", name.c_str());
    return std::nullopt;
}

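// Returns a worst-case compressed (bitstream) input buffer size for the given coded area.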
size_t calculateInputBufferSize(size_t area) {
    if (area > k4KArea) {
        ALOGW("Input buffer size for video area (%zu) larger than 4K (%zu) might be too small.",
              area, k4KArea);
    }

    // Enlarge the input buffer for 4K video.
    if (area > k1080pArea) return kInputBufferSizeFor4K;
    return kInputBufferSizeFor1080p;
}
}  // namespace

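// Setter that validates the requested profile and level against the values this component
// supports.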
// static
C2R V4L2DecodeInterface::ProfileLevelSetter(bool /* mayBlock */,
                                            C2P<C2StreamProfileLevelInfo::input>& info) {
    return info.F(info.v.profile)
            .validatePossible(info.v.profile)
            .plus(info.F(info.v.level).validatePossible(info.v.level));
}

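// Setter that validates the output picture size against the supported width and height ranges.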
// static
C2R V4L2DecodeInterface::SizeSetter(bool /* mayBlock */,
                                    C2P<C2StreamPictureSizeInfo::output>& videoSize) {
    return videoSize.F(videoSize.v.width)
            .validatePossible(videoSize.v.width)
            .plus(videoSize.F(videoSize.v.height).validatePossible(videoSize.v.height));
}

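// Setter that clamps color aspect values outside the defined enums to the corresponding
// *_OTHER value.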
// static
template <typename T>
C2R V4L2DecodeInterface::DefaultColorAspectsSetter(bool /* mayBlock */, C2P<T>& def) {
    if (def.v.range > C2Color::RANGE_OTHER) {
        def.set().range = C2Color::RANGE_OTHER;
    }
    if (def.v.primaries > C2Color::PRIMARIES_OTHER) {
        def.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (def.v.transfer > C2Color::TRANSFER_OTHER) {
        def.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (def.v.matrix > C2Color::MATRIX_OTHER) {
        def.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
}

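// Setter that merges the bitstream (coded) color aspects with the framework-provided defaults.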
// static
C2R V4L2DecodeInterface::MergedColorAspectsSetter(
        bool /* mayBlock */, C2P<C2StreamColorAspectsInfo::output>& merged,
        const C2P<C2StreamColorAspectsTuning::output>& def,
        const C2P<C2StreamColorAspectsInfo::input>& coded) {
    // Take coded values for all specified fields, and default values for unspecified ones.
    merged.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    merged.set().primaries =
            coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
    merged.set().transfer =
            coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
    merged.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
}

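// Calculates the maximum input buffer size from the current output picture size.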
// static
C2R V4L2DecodeInterface::MaxInputBufferSizeCalculator(
        bool /* mayBlock */, C2P<C2StreamMaxBufferSizeInfo::input>& me,
        const C2P<C2StreamPictureSizeInfo::output>& size) {
    me.set().value = calculateInputBufferSize(size.v.width * size.v.height);
    return C2R::Ok();
}

V4L2DecodeInterface::V4L2DecodeInterface(const std::string& name,
                                         const std::shared_ptr<C2ReflectorHelper>& helper)
        : C2InterfaceHelper(helper), mInitStatus(C2_OK) {
    ALOGV("%s(%s)", __func__, name.c_str());

    setDerivedInstance(this);

    mVideoCodec = getCodecFromComponentName(name);
    if (!mVideoCodec) {
        ALOGE("Invalid component name: %s", name.c_str());
        mInitStatus = C2_BAD_VALUE;
        return;
    }

    addParameter(DefineParam(mKind, C2_PARAMKEY_COMPONENT_KIND)
                         .withConstValue(new C2ComponentKindSetting(C2Component::KIND_DECODER))
                         .build());

    std::string inputMime;
    switch (*mVideoCodec) {
    case VideoCodec::H264:
        inputMime = MEDIA_MIMETYPE_VIDEO_AVC;
        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                        .withDefault(new C2StreamProfileLevelInfo::input(
                                0u, C2Config::PROFILE_AVC_MAIN, C2Config::LEVEL_AVC_4))
                        .withFields(
                                {C2F(mProfileLevel, profile)
                                         .oneOf({C2Config::PROFILE_AVC_BASELINE,
                                                 C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
                                                 C2Config::PROFILE_AVC_MAIN,
                                                 C2Config::PROFILE_AVC_HIGH,
                                                 C2Config::PROFILE_AVC_CONSTRAINED_HIGH}),
                                 C2F(mProfileLevel, level)
                                         .oneOf({C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B,
                                                 C2Config::LEVEL_AVC_1_1, C2Config::LEVEL_AVC_1_2,
                                                 C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
                                                 C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2,
                                                 C2Config::LEVEL_AVC_3, C2Config::LEVEL_AVC_3_1,
                                                 C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
                                                 C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2,
                                                 C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1,
                                                 C2Config::LEVEL_AVC_5_2})})
                        .withSetter(ProfileLevelSetter)
                        .build());
        break;

    case VideoCodec::VP8:
        inputMime = MEDIA_MIMETYPE_VIDEO_VP8;
        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                             .withConstValue(new C2StreamProfileLevelInfo::input(
                                     0u, C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED))
                             .build());
        break;

    case VideoCodec::VP9:
        inputMime = MEDIA_MIMETYPE_VIDEO_VP9;
        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                        .withDefault(new C2StreamProfileLevelInfo::input(
                                0u, C2Config::PROFILE_VP9_0, C2Config::LEVEL_VP9_5))
                        .withFields({C2F(mProfileLevel, profile).oneOf({C2Config::PROFILE_VP9_0}),
                                     C2F(mProfileLevel, level)
                                             .oneOf({C2Config::LEVEL_VP9_1, C2Config::LEVEL_VP9_1_1,
                                                     C2Config::LEVEL_VP9_2, C2Config::LEVEL_VP9_2_1,
                                                     C2Config::LEVEL_VP9_3, C2Config::LEVEL_VP9_3_1,
                                                     C2Config::LEVEL_VP9_4, C2Config::LEVEL_VP9_4_1,
                                                     C2Config::LEVEL_VP9_5})})
                        .withSetter(ProfileLevelSetter)
                        .build());
        break;
    }

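    // The input port consumes linear (bitstream) buffers; the output port produces graphic
    // buffers.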
    addParameter(
            DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
                    .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
                    .build());
    addParameter(
            DefineParam(mInputMemoryUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
                    .withConstValue(new C2StreamUsageTuning::input(
                            0u, static_cast<uint64_t>(android::hardware::graphics::common::V1_0::
                                                              BufferUsage::VIDEO_DECODER)))
                    .build());

    addParameter(DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
                         .withConstValue(
                                 new C2StreamBufferTypeSetting::output(0u, C2BufferData::GRAPHIC))
                         .build());
    addParameter(
            DefineParam(mOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
                    .withConstValue(new C2PortDelayTuning::output(getOutputDelay(*mVideoCodec)))
                    .build());

    addParameter(DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
                         .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
                                 inputMime.c_str()))
                         .build());

    addParameter(DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
                         .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
                                 MEDIA_MIMETYPE_VIDEO_RAW))
                         .build());

    // Note(b/165826281): The check is not used by the Android framework currently.
    // To speed up boot time, we use the maximum supported size instead of querying the
    // capability from the V4L2 device.
    addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                         .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                         .withFields({
                                 C2F(mSize, width).inRange(16, 4096, 16),
                                 C2F(mSize, height).inRange(16, 4096, 16),
                         })
                         .withSetter(SizeSetter)
                         .build());

    addParameter(
            DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                    .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kInputBufferSizeFor1080p))
                    .withFields({
                            C2F(mMaxInputSize, value).any(),
                    })
                    .calculatedAs(MaxInputBufferSizeCalculator, mSize)
                    .build());

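    // Secure components use the secure linear allocator for input and the secure graphic
    // allocator for surface output; non-secure components use the default linear allocator and
    // the V4L2 buffer queue. The output allocator is always the V4L2 buffer pool.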
    bool secureMode = name.find(".secure") != std::string::npos;
    const C2Allocator::id_t inputAllocators[] = {secureMode ? V4L2AllocatorId::SECURE_LINEAR
                                                            : C2AllocatorStore::DEFAULT_LINEAR};

    const C2Allocator::id_t outputAllocators[] = {V4L2AllocatorId::V4L2_BUFFERPOOL};
    const C2Allocator::id_t surfaceAllocator =
            secureMode ? V4L2AllocatorId::SECURE_GRAPHIC : V4L2AllocatorId::V4L2_BUFFERQUEUE;
    const C2BlockPool::local_id_t outputBlockPools[] = {C2BlockPool::BASIC_GRAPHIC};

    addParameter(
            DefineParam(mInputAllocatorIds, C2_PARAMKEY_INPUT_ALLOCATORS)
                    .withConstValue(C2PortAllocatorsTuning::input::AllocShared(inputAllocators))
                    .build());

    addParameter(
            DefineParam(mOutputAllocatorIds, C2_PARAMKEY_OUTPUT_ALLOCATORS)
                    .withConstValue(C2PortAllocatorsTuning::output::AllocShared(outputAllocators))
                    .build());

    addParameter(DefineParam(mOutputSurfaceAllocatorId, C2_PARAMKEY_OUTPUT_SURFACE_ALLOCATOR)
                         .withConstValue(new C2PortSurfaceAllocatorTuning::output(surfaceAllocator))
                         .build());

    addParameter(
            DefineParam(mOutputBlockPoolIds, C2_PARAMKEY_OUTPUT_BLOCK_POOLS)
                    .withDefault(C2PortBlockPoolsTuning::output::AllocShared(outputBlockPools))
                    .withFields({C2F(mOutputBlockPoolIds, m.values[0]).any(),
                                 C2F(mOutputBlockPoolIds, m.values).inRange(0, 1)})
                    .withSetter(Setter<C2PortBlockPoolsTuning::output>::NonStrictValuesWithNoDeps)
                    .build());

    addParameter(
            DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
                    .withDefault(new C2StreamColorAspectsTuning::output(
                            0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                    .withFields(
                            {C2F(mDefaultColorAspects, range)
                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                             C2F(mDefaultColorAspects, primaries)
                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                              C2Color::PRIMARIES_OTHER),
                             C2F(mDefaultColorAspects, transfer)
                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                              C2Color::TRANSFER_OTHER),
                             C2F(mDefaultColorAspects, matrix)
                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
                    .withSetter(DefaultColorAspectsSetter)
                    .build());

    addParameter(
            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
                    .withDefault(new C2StreamColorAspectsInfo::input(
                            0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                    .withFields(
                            {C2F(mCodedColorAspects, range)
                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                             C2F(mCodedColorAspects, primaries)
                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                              C2Color::PRIMARIES_OTHER),
                             C2F(mCodedColorAspects, transfer)
                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                              C2Color::TRANSFER_OTHER),
                             C2F(mCodedColorAspects, matrix)
                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
                    .withSetter(DefaultColorAspectsSetter)
                    .build());

    addParameter(
            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
                    .withDefault(new C2StreamColorAspectsInfo::output(
                            0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                    .withFields(
                            {C2F(mColorAspects, range)
                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                             C2F(mColorAspects, primaries)
                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                              C2Color::PRIMARIES_OTHER),
                             C2F(mColorAspects, transfer)
                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                              C2Color::TRANSFER_OTHER),
                             C2F(mColorAspects, matrix)
                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
                    .withSetter(MergedColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
                    .build());
}

size_t V4L2DecodeInterface::getInputBufferSize() const {
    return calculateInputBufferSize(mSize->width * mSize->height);
}

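// Queries the currently configured output color aspects without blocking; on success the result
// is returned through |targetColorAspects|.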
c2_status_t V4L2DecodeInterface::queryColorAspects(
        std::shared_ptr<C2StreamColorAspectsInfo::output>* targetColorAspects) {
    std::unique_ptr<C2StreamColorAspectsInfo::output> colorAspects =
            std::make_unique<C2StreamColorAspectsInfo::output>(
                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED);
    c2_status_t status = query({colorAspects.get()}, {}, C2_DONT_BLOCK, nullptr);
    if (status == C2_OK) {
        *targetColorAspects = std::move(colorAspects);
    }
    return status;
}

uint32_t V4L2DecodeInterface::getOutputDelay(VideoCodec codec) {
    switch (codec) {
    case VideoCodec::H264:
        // Due to frame reordering an H264 decoder might need multiple additional input frames to
        // be queued before being able to output the associated decoded buffers. We need to tell
        // the codec2 framework that it should not stop queuing new work items until the maximum
        // number of reordered frames is reached, to avoid stalling the decoder.
        return 16;
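    // VP8 and VP9 don't reorder frames, so no extra output delay is needed.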
    case VideoCodec::VP8:
        return 0;
    case VideoCodec::VP9:
        return 0;
    }
}

}  // namespace android