/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef HARDWARE_API_H_

#define HARDWARE_API_H_

#include <media/hardware/OMXPluginBase.h>
#include <media/hardware/MetadataBufferType.h>
#include <system/window.h>
#include <utils/RefBase.h>

#include "VideoAPI.h"

#include <OMX_Component.h>

namespace android {

// This structure is used to enable Android native buffer use for either
// graphic buffers or secure buffers.
//
// TO CONTROL ANDROID GRAPHIC BUFFER USAGE:
//
// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
// is given.
//
// When Android native buffer use is disabled for a port (the default state),
// the OMX node should operate as normal, and expect UseBuffer calls to set its
// buffers.  This is the mode that will be used when CPU access to the buffer is
// required.
//
// When Android native buffer use has been enabled for a given port, the video
// color format for the port is to be interpreted as an Android pixel format
// rather than an OMX color format.  Enabling Android native buffers may also
// change how the component receives the native buffers.  If store-metadata-mode
// is enabled on the port, the component will receive the buffers as specified
// in the section below. Otherwise, unless the node supports the
// 'OMX.google.android.index.useAndroidNativeBuffer2' extension, it should
// expect to receive UseAndroidNativeBuffer calls (via OMX_SetParameter) rather
// than UseBuffer calls for that port.
//
// TO CONTROL ANDROID SECURE BUFFER USAGE:
//
// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.allocateNativeHandle' extension
// is given.
//
// When native handle use is disabled for a port (the default state),
// the OMX node should operate as normal, and expect AllocateBuffer calls to
// return buffer pointers. This is the mode that will be used for non-secure
// buffers if the component requires AllocateBuffer rather than UseBuffer calls.
//
// When native handle use has been enabled for a given port, the component
// shall allocate native_handle_t objects that can be passed between
// processes using binder. This is the mode that will be used for secure buffers.
// When an OMX component allocates a native handle for a buffer, it must close and
// delete that handle when it frees that buffer. Even though pBuffer will point
// to a native handle, nFilledLen, nAllocLen and nOffset will correspond
// to the data inside the opaque buffer.
struct EnableAndroidNativeBuffersParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_BOOL enable;
};

typedef struct EnableAndroidNativeBuffersParams AllocateNativeHandleParams;

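// For illustration only, not part of the API: a minimal sketch of how a client could enable
// graphic-buffer use on a port with this extension, written against the raw OpenMAX IL macros
// from OMX_Core.h. On Android the framework routes this through IOMX, but the parameter layout
// is the same; the function and variable names below are hypothetical.
//
//     OMX_ERRORTYPE enableNativeBuffers(OMX_HANDLETYPE node, OMX_U32 portIndex, bool enable) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.enableAndroidNativeBuffers";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;  // extension not supported by this component
//         }
//
//         EnableAndroidNativeBuffersParams params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;  // OMX IL version; set all four sub-fields
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.nPortIndex = portIndex;        // typically a video decoder's output port
//         params.enable = enable ? OMX_TRUE : OMX_FALSE;
//         return OMX_SetParameter(node, index, &params);
//     }
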
// A pointer to this struct is passed to OMX_SetParameter() when the extension index
// "OMX.google.android.index.storeMetaDataInBuffers" or
// "OMX.google.android.index.storeANWBufferInMetadata" is given.
//
// When meta data is stored in the video buffers passed between OMX clients
// and OMX components, interpretation of the buffer data is up to the
// buffer receiver; the data may or may not be the actual video data, but
// may instead be information that helps the receiver locate the actual data.
// The buffer receiver thus needs to know how to interpret what is stored
// in these buffers, with mechanisms pre-determined externally. How to
// interpret the meta data is outside of the scope of this parameter.
//
// Currently, this is used to pass meta data from a video source (a camera component, for
// instance) to a video encoder to avoid memcpying of input video frame data, as well as to pass
// dynamic output buffers to a video decoder. To do this, bStoreMetaData is set to OMX_TRUE.
//
// If bStoreMetaData is set to OMX_FALSE, real YUV frame data will be stored in input buffers, and
// the output buffers contain either real YUV frame data, or are themselves native handles as
// directed by enable/use-android-native-buffer parameter settings.
// In addition, the absence of an OMX_SetParameter() call with the corresponding extension index
// on a port does not, by itself, mean that the client is not using metadata mode for that port.
//
// If the component supports this using the "OMX.google.android.index.storeANWBufferInMetadata"
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoNativeMetadata
// layout as defined below. Each buffer will be accompanied by a fence. The fence must signal
// before the buffer can be used (e.g. read from or written into). When returning such a buffer to
// the client, the component must provide a new fence that must signal before the returned buffer
// can be used (e.g. read from or written into). The component owns the incoming fenceFd, and must
// close it when the fence has signaled. The client will own and close the returned fence file
// descriptor.
//
// If the component supports this using the "OMX.google.android.index.storeMetaDataInBuffers"
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoGrallocMetadata
// layout defined below. Camera input can also be passed as "CameraSource", the layout of which is
// vendor dependent.
//
// Metadata buffers are registered with the component using UseBuffer calls, or can be allocated
// by the component for encoder-metadata-output buffers.
struct StoreMetaDataInBuffersParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_BOOL bStoreMetaData;
};

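// For illustration only, not part of the API: a hypothetical client-side sketch that enables
// metadata mode on a port, preferring the ANWBuffer-in-metadata variant and falling back to the
// legacy index, using the raw OMX_Core.h macros. Helper and variable names are made up.
//
//     OMX_ERRORTYPE enableMetadataMode(OMX_HANDLETYPE node, OMX_U32 portIndex) {
//         const char *names[] = {
//             "OMX.google.android.index.storeANWBufferInMetadata",   // preferred
//             "OMX.google.android.index.storeMetaDataInBuffers",     // legacy fallback
//         };
//         OMX_ERRORTYPE err = OMX_ErrorUnsupportedIndex;
//         for (const char *name : names) {
//             OMX_INDEXTYPE index;
//             err = OMX_GetExtensionIndex(node, const_cast<char *>(name), &index);
//             if (err != OMX_ErrorNone) {
//                 continue;
//             }
//             StoreMetaDataInBuffersParams params;
//             params.nSize = sizeof(params);
//             params.nVersion.s.nVersionMajor = 1;
//             params.nVersion.s.nVersionMinor = 0;
//             params.nVersion.s.nRevision = 0;
//             params.nVersion.s.nStep = 0;
//             params.nPortIndex = portIndex;
//             params.bStoreMetaData = OMX_TRUE;
//             err = OMX_SetParameter(node, index, &params);
//             if (err == OMX_ErrorNone) {
//                 break;  // metadata mode enabled with this index
//             }
//         }
//         return err;
//     }
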
// Meta data buffer layout used to transport output frames to the decoder for
// dynamic buffer handling.
struct VideoGrallocMetadata {
    MetadataBufferType eType;               // must be kMetadataBufferTypeGrallocSource
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    OMX_PTR pHandle;
#else
    buffer_handle_t pHandle;
#endif
};

// Legacy name for VideoGrallocMetadata struct.
struct VideoDecoderOutputMetaData : public VideoGrallocMetadata {};

struct VideoNativeMetadata {
    MetadataBufferType eType;               // must be kMetadataBufferTypeANWBuffer
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    OMX_PTR pBuffer;
#else
    struct ANativeWindowBuffer* pBuffer;
#endif
    int nFenceFd;                           // -1 if unused
};

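// For illustration only, not part of the API: a hypothetical component-side sketch of the fence
// handling described above, assuming the non-32-on-64 layout (pBuffer is an ANativeWindowBuffer*)
// and that libsync's sync_wait() is available (the header is <sync/sync.h> or <android/sync.h>
// depending on the tree). Error handling is elided and the function name is made up.
//
//     ANativeWindowBuffer *acquireMetadataBuffer(OMX_BUFFERHEADERTYPE *header) {
//         VideoNativeMetadata &meta =
//                 *reinterpret_cast<VideoNativeMetadata *>(header->pBuffer);
//         // eType is expected to be kMetadataBufferTypeANWBuffer for this layout.
//         if (meta.nFenceFd >= 0) {
//             sync_wait(meta.nFenceFd, -1 /* no timeout */);  // fence must signal first
//             close(meta.nFenceFd);   // this side owns the incoming fence fd
//             meta.nFenceFd = -1;
//         }
//         return meta.pBuffer;        // now safe to read from or write into
//     }
//
//     // When returning the buffer, this side would store a new fence fd (or -1) into
//     // meta.nFenceFd; the receiver then owns and closes that returned fd.
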
// Meta data buffer layout for passing a native_handle to the codec.
struct VideoNativeHandleMetadata {
    MetadataBufferType eType;               // must be kMetadataBufferTypeNativeHandleSource

#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    OMX_PTR pHandle;
#else
    native_handle_t *pHandle;
#endif
};

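// For illustration only, not part of the API: a hypothetical client-side sketch that packs a
// secure buffer's native_handle_t into an input metadata buffer before OMX_EmptyThisBuffer().
// It assumes the non-32-on-64 layout and a handle obtained from whatever secure allocator the
// device uses; the function name is made up.
//
//     void packSecureInput(OMX_BUFFERHEADERTYPE *header, native_handle_t *secureHandle,
//                          OMX_U32 filledLen) {
//         VideoNativeHandleMetadata &meta =
//                 *reinterpret_cast<VideoNativeHandleMetadata *>(header->pBuffer);
//         meta.eType = kMetadataBufferTypeNativeHandleSource;
//         meta.pHandle = secureHandle;
//
//         // nFilledLen and nOffset still describe the data inside the opaque secure buffer.
//         header->nFilledLen = filledLen;
//         header->nOffset = 0;
//     }
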
// A pointer to this struct is passed to OMX_SetParameter() when the extension
// index "OMX.google.android.index.prepareForAdaptivePlayback" is given.
//
// This extension is used to signal a video decoder that the user has requested
// seamless resolution change support (if bEnable is set to OMX_TRUE).
// nMaxFrameWidth and nMaxFrameHeight are the dimensions of the largest
// anticipated frames in the video.  If bEnable is OMX_FALSE, no resolution
// change is expected, and the nMaxFrameWidth/Height fields are unused.
//
// If the decoder supports dynamic output buffers, it may ignore this
// request.  Otherwise, it shall request resources in such a way that it
// avoids full port-reconfiguration (due to output port-definition change)
// during resolution changes.
//
// DO NOT USE THIS STRUCTURE AS IT WILL BE REMOVED.  INSTEAD, IMPLEMENT
// METADATA SUPPORT FOR VIDEO DECODERS.
struct PrepareForAdaptivePlaybackParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_BOOL bEnable;
    OMX_U32 nMaxFrameWidth;
    OMX_U32 nMaxFrameHeight;
};

// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
// given.  This call will only be performed if a prior call was made with the
// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
// enabling use of Android native buffers.
struct UseAndroidNativeBufferParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_U32 nPortIndex;
    OMX_PTR pAppPrivate;
    OMX_BUFFERHEADERTYPE **bufferHeader;
    const sp<ANativeWindowBuffer>& nativeBuffer;
};

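// For illustration only, not part of the API: a hypothetical sketch of registering one native
// buffer with this extension. Because of the reference member, the struct is filled via
// aggregate initialization. Function and variable names are made up.
//
//     OMX_ERRORTYPE useNativeBuffer(OMX_HANDLETYPE node, OMX_U32 portIndex, OMX_PTR appPrivate,
//                                   const sp<ANativeWindowBuffer> &buffer,
//                                   OMX_BUFFERHEADERTYPE **outHeader) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.useAndroidNativeBuffer";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         OMX_VERSIONTYPE version;
//         version.s.nVersionMajor = 1;
//         version.s.nVersionMinor = 0;
//         version.s.nRevision = 0;
//         version.s.nStep = 0;
//
//         UseAndroidNativeBufferParams params = {
//             static_cast<OMX_U32>(sizeof(params)), version, portIndex, appPrivate,
//             outHeader, buffer,
//         };
//         return OMX_SetParameter(node, index, &params);
//     }
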
// A pointer to this struct is passed to OMX_GetParameter when the extension
// index for the 'OMX.google.android.index.getAndroidNativeBufferUsage'
// extension is given.  The usage bits returned from this query will be used to
// allocate the Gralloc buffers that get passed to the useAndroidNativeBuffer
// command.
struct GetAndroidNativeBufferUsageParams {
    OMX_U32 nSize;              // IN
    OMX_VERSIONTYPE nVersion;   // IN
    OMX_U32 nPortIndex;         // IN
    OMX_U32 nUsage;             // OUT
};

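// For illustration only, not part of the API: a hypothetical sketch that queries the usage bits
// a component needs for its buffers; the caller would OR these into the usage it passes to
// gralloc (for example via native_window_set_usage()). Names are made up.
//
//     OMX_ERRORTYPE getNativeBufferUsage(OMX_HANDLETYPE node, OMX_U32 portIndex, OMX_U32 *usage) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.getAndroidNativeBufferUsage";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         GetAndroidNativeBufferUsageParams params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.nPortIndex = portIndex;
//         params.nUsage = 0;
//
//         err = OMX_GetParameter(node, index, &params);
//         if (err == OMX_ErrorNone) {
//             *usage = params.nUsage;   // OR into the gralloc usage for these buffers
//         }
//         return err;
//     }
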
// The enum value OMX_COLOR_FormatAndroidOpaque, declared in
// media/stagefright/openmax/OMX_IVCommon.h, indicates an opaque color format.
// It informs the encoder that the actual color format will be conveyed by the
// gralloc buffers:
// OMX_COLOR_FormatAndroidOpaque  = 0x7F000001,

// A pointer to this struct is passed to OMX_SetParameter when the extension
// index for the 'OMX.google.android.index.prependSPSPPSToIDRFrames' extension
// is given.
// A successful result indicates that future IDR frames will be prefixed by
// SPS/PPS.
struct PrependSPSPPSToIDRFramesParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    OMX_BOOL bEnable;
};

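// For illustration only, not part of the API: a hypothetical sketch that asks an encoder to
// prepend SPS/PPS to every IDR frame. Note that this parameter has no nPortIndex field. Names
// are made up.
//
//     OMX_ERRORTYPE prependCodecConfigToIdr(OMX_HANDLETYPE node, bool enable) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.prependSPSPPSToIDRFrames";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         PrependSPSPPSToIDRFramesParams params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.bEnable = enable ? OMX_TRUE : OMX_FALSE;
//         return OMX_SetParameter(node, index, &params);
//     }
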
// A pointer to this struct is passed to OMX_GetParameter when the extension
// index for the 'OMX.google.android.index.describeColorFormat'
// extension is given.  This method can be called from any component state
// other than invalid.  The color-format, frame width/height, and stride/
// slice-height parameters are ones that are associated with a raw video
// port (input or output), but the stride/slice-height parameters may be
// incorrect. bUsingNativeBuffers is OMX_TRUE if native android buffers will
// be used (while specifying this color format).
//
// The component shall fill out the MediaImage structure that
// corresponds to the described raw video format, and the potentially corrected
// stride and slice-height info.
//
// The behavior is slightly different if bUsingNativeBuffers is OMX_TRUE,
// though most implementations can ignore this difference. When using native buffers,
// the component may change the configured color format to an optimized format.
// Additionally, when allocating these buffers for the flexible use case, the framework
// will set the SW_READ/WRITE_OFTEN usage flags. In this case (if bUsingNativeBuffers
// is OMX_TRUE), the component shall fill out the MediaImage information for the
// scenario when these SW-readable/writable buffers are locked using gralloc_lock.
// Note that these buffers may also be locked using gralloc_lock_ycbcr, which must
// be supported for vendor-specific formats.
//
// For non-YUV packed planar/semiplanar image formats, or if bUsingNativeBuffers
// is OMX_TRUE and the component does not support this color format with native
// buffers, the component shall set mNumPlanes to 0, and mType to MEDIA_IMAGE_TYPE_UNKNOWN.

// @deprecated: use DescribeColorFormat2Params
struct DescribeColorFormat2Params;
struct DescribeColorFormatParams {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    // input: parameters from OMX_VIDEO_PORTDEFINITIONTYPE
    OMX_COLOR_FORMATTYPE eColorFormat;
    OMX_U32 nFrameWidth;
    OMX_U32 nFrameHeight;
    OMX_U32 nStride;
    OMX_U32 nSliceHeight;
    OMX_BOOL bUsingNativeBuffers;

    // output: fill out the MediaImage fields
    MediaImage sMediaImage;

    DescribeColorFormatParams(const DescribeColorFormat2Params&); // for internal use only
};

// A pointer to this struct is passed to OMX_GetParameter when the extension
// index for the 'OMX.google.android.index.describeColorFormat2'
// extension is given. This is operationally the same as DescribeColorFormatParams
// but can be used for HDR and RGBA/YUVA formats.
struct DescribeColorFormat2Params {
    OMX_U32 nSize;
    OMX_VERSIONTYPE nVersion;
    // input: parameters from OMX_VIDEO_PORTDEFINITIONTYPE
    OMX_COLOR_FORMATTYPE eColorFormat;
    OMX_U32 nFrameWidth;
    OMX_U32 nFrameHeight;
    OMX_U32 nStride;
    OMX_U32 nSliceHeight;
    OMX_BOOL bUsingNativeBuffers;

    // output: fill out the MediaImage2 fields
    MediaImage2 sMediaImage;

    void initFromV1(const DescribeColorFormatParams&); // for internal use only
};

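// For illustration only, not part of the API: a hypothetical sketch that asks a component to
// describe its raw video layout. The input fields would normally be copied from the port's
// OMX_VIDEO_PORTDEFINITIONTYPE; names are made up and error handling is minimal.
//
//     OMX_ERRORTYPE describeColorFormat(OMX_HANDLETYPE node,
//                                       const OMX_VIDEO_PORTDEFINITIONTYPE &video,
//                                       OMX_BOOL usingNativeBuffers, MediaImage2 *image) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.describeColorFormat2";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;   // a caller could fall back to describeColorFormat here
//         }
//
//         DescribeColorFormat2Params params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.eColorFormat = video.eColorFormat;
//         params.nFrameWidth = video.nFrameWidth;
//         params.nFrameHeight = video.nFrameHeight;
//         params.nStride = video.nStride;
//         params.nSliceHeight = video.nSliceHeight;
//         params.bUsingNativeBuffers = usingNativeBuffers;
//
//         err = OMX_GetParameter(node, index, &params);
//         if (err == OMX_ErrorNone) {
//             *image = params.sMediaImage;   // mType is MEDIA_IMAGE_TYPE_UNKNOWN if unsupported
//         }
//         return err;
//     }
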
// A pointer to this struct is passed to OMX_SetParameter or OMX_GetParameter
// when the extension index for the
// 'OMX.google.android.index.configureVideoTunnelMode' extension is given.
// If the extension is supported then tunneled playback mode should be supported
// by the codec. If bTunneled is set to OMX_TRUE then the video decoder should
// operate in "tunneled" mode and output its decoded frames directly to the
// sink. In this case nAudioHwSync is the HW SYNC ID of the audio HAL output
// stream to sync the video with. If bTunneled is set to OMX_FALSE, "tunneled"
// mode should be disabled and nAudioHwSync should be ignored.
// OMX_GetParameter is used to query the tunneling configuration. bTunneled should
// return whether the decoder is operating in tunneled mode, and if it is,
// pSidebandWindow should contain the codec-allocated sideband window handle.
struct ConfigureVideoTunnelModeParams {
    OMX_U32 nSize;              // IN
    OMX_VERSIONTYPE nVersion;   // IN
    OMX_U32 nPortIndex;         // IN
    OMX_BOOL bTunneled;         // IN/OUT
    OMX_U32 nAudioHwSync;       // IN
    OMX_PTR pSidebandWindow;    // OUT
};

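// For illustration only, not part of the API: a hypothetical sketch that enables tunneled
// playback and then reads back the sideband window handle allocated by the codec. Names are
// made up.
//
//     OMX_ERRORTYPE configureTunnelMode(OMX_HANDLETYPE node, OMX_U32 portIndex,
//                                       OMX_U32 audioHwSync, OMX_PTR *sidebandWindow) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.configureVideoTunnelMode";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         ConfigureVideoTunnelModeParams params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.nPortIndex = portIndex;
//         params.bTunneled = OMX_TRUE;
//         params.nAudioHwSync = audioHwSync;
//         params.pSidebandWindow = NULL;
//
//         err = OMX_SetParameter(node, index, &params);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         // Query back: pSidebandWindow is filled out by the codec.
//         err = OMX_GetParameter(node, index, &params);
//         if (err == OMX_ErrorNone) {
//             *sidebandWindow = params.pSidebandWindow;
//         }
//         return err;
//     }
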
// Color space description (aspects) parameters.
// This is passed via OMX_SetConfig or OMX_GetConfig to video encoders and decoders when the
// 'OMX.google.android.index.describeColorAspects' extension is given. The component SHALL behave
// as described below if it supports this extension.
//
// bDataSpaceChanged and bRequestingDataSpace are assumed to be OMX_FALSE unless noted otherwise.
//
// VIDEO ENCODERS: the framework uses OMX_SetConfig to specify color aspects of the coded video.
// This may happen:
//   a) before the component transitions to idle state
//   b) before the input frame is sent via OMX_EmptyThisBuffer in executing state
//   c) during execution, just before an input frame with different color aspect information
//      is sent.
//
// The framework also uses OMX_GetConfig to
//   d) verify the color aspects that will be written to the stream
//   e) (optional) verify the color aspects that should be reported to the container for a
//      given dataspace/pixelformat received
//
// 1. Encoders SHOULD maintain an internal color aspect state, initialized to Unspecified values.
//    This represents the values that will be written into the bitstream.
// 2. Upon OMX_SetConfig, they SHOULD update their internal state to the aspects received
//    (including Unspecified values). For specific aspect values that are not supported by the
//    codec standard, encoders SHOULD substitute Unspecified values; or they MAY use a suitable
//    alternative (e.g. to suggest the use of BT.709 EOTF instead of SMPTE 240M.)
// 3. OMX_GetConfig SHALL return the internal state (values that will be written).
// 4. OMX_SetConfig SHALL always succeed before receiving the first frame. It MAY fail afterwards,
//    but only if the configured values would change AND the component does not support updating
//    the color information to those values mid-stream. If the component supports updating a
//    portion of the color information, those values should be updated in the internal state, and
//    OMX_SetConfig SHALL succeed. Otherwise, the internal state SHALL remain intact and
//    OMX_SetConfig SHALL fail with OMX_ErrorUnsupportedSettings.
// 5. When the framework receives an input frame with an unexpected dataspace, it will query
//    encoders for the color aspects that should be reported to the container using OMX_GetConfig
//    with bDataSpaceChanged set to OMX_TRUE, and nPixelFormat/nDataSpace containing the new
//    format/dataspace values. This allows vendors to use extended dataspaces during capture and
//    composition (e.g. screenrecord) - while performing color-space conversion inside the encoder -
//    and encode and report different color-space information in the bitstream/container.
//    sAspects contains the color aspects requested by the client for reference, which may
//    include aspects not supported by the encoding. This is used together with the guidance for
//    dataspace selection; see 6. below.
//
// VIDEO DECODERS: the framework uses OMX_SetConfig to specify the default color aspects to use
// for the video.
// This may happen:
//   a) before the component transitions to idle state
//   b) during execution, when the resolution or the default color aspects change.
//
// The framework also uses OMX_GetConfig to
//   c) get the final color aspects reported by the coded bitstream after taking the default values
//      into account.
//
// 1. Decoders should maintain two color aspect states - the default state as reported by the
//    framework, and the coded state as reported by the bitstream - as each state can change
//    independently from the other.
// 2. Upon OMX_SetConfig, the component SHALL update its default state regardless of whether such
//    aspects could be supplied by the bitstream. (E.g. it should blindly support all enumeration
//    values, even unknown ones, and the Other value). This SHALL always succeed.
// 3. Upon OMX_GetConfig, the component SHALL return the final color aspects by replacing
//    Unspecified coded values with the default values. This SHALL always succeed.
// 4. Whenever the component processes color aspect information in the bitstream, even with an
//    Unspecified value, it SHOULD update its internal coded state with that information just
//    before the frame with the new information would be output, and the component SHALL signal an
//    OMX_EventPortSettingsChanged event with data2 set to the extension index.
// NOTE: The component SHOULD NOT signal a separate event purely for a color aspect change if it
//    occurs together with a port definition (e.g. size) or crop change.
// 5. If the aspects a component encounters in the bitstream cannot be represented with the
//    enumeration values defined below, the component SHALL set those aspects to Other. Restricted
//    values in the bitstream SHALL be treated as defined by the relevant bitstream
//    specifications/standards, or as Unspecified if not defined.
//
// BOTH DECODERS AND ENCODERS: the framework uses OMX_GetConfig during idle and executing state to
//   f) (optional) get guidance for the dataspace to set for given color aspects, by setting
//      bRequestingDataSpace to OMX_TRUE. The component SHALL return OMX_ErrorUnsupportedSettings
//      if it does not support this request.
//
// 6. This is an information request that can happen at any time, independent of the normal
//    configuration process. This allows vendors to use extended dataspaces during capture,
//    playback and composition - while performing color-space conversion inside the component. The
//    component SHALL set the desired dataspace into nDataSpace. Otherwise, it SHALL return
//    OMX_ErrorUnsupportedSettings to let the framework choose a nearby standard dataspace.
//
// 6.a. For encoders, this query happens before the first frame is received when using surface
//    encoding. This allows the encoder to use a specific dataspace for the color aspects (e.g.
//    because the device supports additional dataspaces, or because it wants to perform color-space
//    extension to facilitate a more optimal rendering/capture pipeline).
//
// 6.b. For decoders, this query happens before the first frame, and every time the color aspects
//    change, while using surface buffers. This allows the decoder to use a specific dataspace for
//    the color aspects (e.g. because the device supports additional dataspaces, or because it wants
//    to perform color-space extension by inline color-space conversion to facilitate a more optimal
//    rendering pipeline).
//
// Note: the size of sAspects may increase in the future with additional fields.
// Implementations SHOULD NOT require a certain size.
struct DescribeColorAspectsParams {
    OMX_U32 nSize;                 // IN
    OMX_VERSIONTYPE nVersion;      // IN
    OMX_U32 nPortIndex;            // IN
    OMX_BOOL bRequestingDataSpace; // IN
    OMX_BOOL bDataSpaceChanged;    // IN
    OMX_U32 nPixelFormat;          // IN
    OMX_U32 nDataSpace;            // OUT
    ColorAspects sAspects;         // IN/OUT
};

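// For illustration only, not part of the API: a hypothetical client-side sketch against a
// decoder, setting default color aspects via OMX_SetConfig and reading back the final (coded
// plus default) aspects via OMX_GetConfig. Only mRange is given a default here; the other
// aspects stay Unspecified. Names are made up.
//
//     OMX_ERRORTYPE setAndGetColorAspects(OMX_HANDLETYPE node, OMX_U32 portIndex,
//                                         ColorAspects *finalAspects) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.describeColorAspects";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         DescribeColorAspectsParams params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.nPortIndex = portIndex;
//         params.bRequestingDataSpace = OMX_FALSE;
//         params.bDataSpaceChanged = OMX_FALSE;
//         params.nPixelFormat = 0;
//         params.nDataSpace = 0;
//         params.sAspects.mRange = ColorAspects::RangeLimited;          // default to use
//         params.sAspects.mPrimaries = ColorAspects::PrimariesUnspecified;
//         params.sAspects.mTransfer = ColorAspects::TransferUnspecified;
//         params.sAspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
//
//         err = OMX_SetConfig(node, index, &params);    // SHALL succeed per rule 2. above
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//         err = OMX_GetConfig(node, index, &params);    // coded values merged with defaults
//         if (err == OMX_ErrorNone) {
//             *finalAspects = params.sAspects;
//         }
//         return err;
//     }
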
// HDR color description parameters.
// This is passed via OMX_SetConfig or OMX_GetConfig to video encoders and decoders when the
// 'OMX.google.android.index.describeHDRColorInfo' extension is given and an HDR stream
// is detected.  The component SHALL behave as described below if it supports this extension.
//
// Currently, only Static Metadata Descriptor Type 1 support is required.
//
// VIDEO ENCODERS: the framework uses OMX_SetConfig to specify the HDR static information of the
// coded video.
// This may happen:
//   a) before the component transitions to idle state
//   b) before the input frame is sent via OMX_EmptyThisBuffer in executing state
//   c) during execution, just before an input frame with different HDR static
//      information is sent.
//
// The framework also uses OMX_GetConfig to
//   d) verify the HDR static information that will be written to the stream.
//
// 1. Encoders SHOULD maintain internal HDR static info, initialized to Unspecified values.
//    This represents the values that will be written into the bitstream.
// 2. Upon OMX_SetConfig, they SHOULD update their internal state to the info received
//    (including Unspecified values). For specific parameters that are not supported by the
//    codec standard, encoders SHOULD substitute Unspecified values. NOTE: no other substitution
//    is allowed.
// 3. OMX_GetConfig SHALL return the internal state (values that will be written).
// 4. OMX_SetConfig SHALL always succeed before receiving the first frame if the encoder is
//    configured into an HDR compatible profile. It MAY fail with OMX_ErrorUnsupportedSettings error
//    code if it is not configured into such a profile, OR if the configured values would change
//    AND the component does not support updating the HDR static information mid-stream. If the
//    component supports updating a portion of the information, those values should be updated in
//    the internal state, and OMX_SetConfig SHALL succeed. Otherwise, the internal state SHALL
//    remain intact.
//
// VIDEO DECODERS: the framework uses OMX_SetConfig to specify the default HDR static information
// to use for the video.
//   a) This only happens if the client supplies this information, in which case it occurs before
//      the component transitions to idle state.
//   b) This may also happen subsequently if the default HDR static information changes.
//
// The framework also uses OMX_GetConfig to
//   c) get the final HDR static information reported by the coded bitstream after taking the
//      default values into account.
//
// 1. Decoders should maintain two HDR static information structures - the default values as
//    reported by the framework, and the coded values as reported by the bitstream - as each
//    structure can change independently from the other.
// 2. Upon OMX_SetConfig, the component SHALL update its default structure regardless of whether
//    such static parameters could be supplied by the bitstream. (E.g. it should blindly support
//    all parameter values, even seemingly illegal ones). This SHALL always succeed.
//  Note: The descriptor ID used in sInfo may change in subsequent calls. (Although for now only
//    Type 1 support is required.)
// 3. Upon OMX_GetConfig, the component SHALL return the final HDR static information by replacing
//    Unspecified coded values with the default values. This SHALL always succeed. This may be
//    provided using any supported descriptor ID (currently only Type 1) with the goal of expressing
//    as much of the available static information as possible.
// 4. Whenever the component processes HDR static information in the bitstream, even with
//    Unspecified parameters, it SHOULD update its internal coded structure with that information
//    just before the frame with the new information would be output, and the component SHALL
//    signal an OMX_EventPortSettingsChanged event with data2 set to the extension index.
// NOTE: The component SHOULD NOT signal a separate event purely for an HDR static info change if
//    it occurs together with a port definition (e.g. size), color aspect or crop change.
// 5. If certain parameters of the HDR static information encountered in the bitstream cannot be
//    represented using sInfo, the component SHALL use the closest representation.
//
// Note: the size of sInfo may increase in the future by supporting additional descriptor types.
// Implementations SHOULD NOT require a certain size.
struct DescribeHDRStaticInfoParams {
    OMX_U32 nSize;                 // IN
    OMX_VERSIONTYPE nVersion;      // IN
    OMX_U32 nPortIndex;            // IN
    HDRStaticInfo sInfo;           // IN/OUT
};

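// For illustration only, not part of the API: a hypothetical sketch that reads the final HDR
// static information (coded values merged with defaults) from a decoder. The sInfo contents are
// then interpreted per the HDRStaticInfo definition in VideoAPI.h (currently Type 1). Names are
// made up.
//
//     OMX_ERRORTYPE getHdrStaticInfo(OMX_HANDLETYPE node, OMX_U32 portIndex, HDRStaticInfo *info) {
//         OMX_INDEXTYPE index;
//         char name[] = "OMX.google.android.index.describeHDRColorInfo";
//         OMX_ERRORTYPE err = OMX_GetExtensionIndex(node, name, &index);
//         if (err != OMX_ErrorNone) {
//             return err;
//         }
//
//         DescribeHDRStaticInfoParams params;
//         params.nSize = sizeof(params);
//         params.nVersion.s.nVersionMajor = 1;
//         params.nVersion.s.nVersionMinor = 0;
//         params.nVersion.s.nRevision = 0;
//         params.nVersion.s.nStep = 0;
//         params.nPortIndex = portIndex;
//
//         err = OMX_GetConfig(node, index, &params);
//         if (err == OMX_ErrorNone) {
//             *info = params.sInfo;
//         }
//         return err;
//     }
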
}  // namespace android

extern android::OMXPluginBase *createOMXPlugin();

#endif  // HARDWARE_API_H_