/*
* Copyright (c) 2012 Intel Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#define LOG_TIME 0
//#define LOG_NDEBUG 0
#define LOG_TAG "OMXVideoDecoderVP9Hybrid"
#include <wrs_omxil_core/log.h>
#include "OMXVideoDecoderVP9Hybrid.h"

#include <system/window.h>
#include <hardware/hardware.h>
#include <hardware/gralloc.h>
#include <system/graphics.h>

#include <hal_public.h>

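// VP9_YV12_ALIGN is an alignment mask: (width + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN
// rounds the luma stride up to a multiple of 128 bytes for the decoder's
// graphic buffers (see ProcessorInit/ProcessorReset).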
#define VP9_YV12_ALIGN (128-1)
static const char* VP9_MIME_TYPE = "video/x-vnd.on2.vp9";

OMXVideoDecoderVP9Hybrid::OMXVideoDecoderVP9Hybrid() {
    LOGV("OMXVideoDecoderVP9Hybrid is constructed.");
    mNativeBufferCount = OUTPORT_NATIVE_BUFFER_COUNT;
    BuildHandlerList();
    mLibHandle = NULL;
    mOpenDecoder = NULL;
    mInitDecoder = NULL;
    mCloseDecoder = NULL;
    mSingalRenderDone = NULL;
    mDecoderDecode = NULL;
    mCheckBufferAvailable = NULL;
    mGetOutput = NULL;
    mGetRawDataOutput = NULL;
    mGetFrameResolution = NULL;
    mDeinitDecoder = NULL;
    mLastTimeStamp = 0;
    mWorkingMode = RAWDATA_MODE;
    mDecodedImageWidth = 0;
    mDecodedImageHeight = 0;
    mDecodedImageNewWidth = 0;
    mDecodedImageNewHeight = 0;
}

OMXVideoDecoderVP9Hybrid::~OMXVideoDecoderVP9Hybrid() {
    LOGV("OMXVideoDecoderVP9Hybrid is destructed.");
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::InitInputPortFormatSpecific(
        OMX_PARAM_PORTDEFINITIONTYPE *paramPortDefinitionInput) {
    // OMX_PARAM_PORTDEFINITIONTYPE
    paramPortDefinitionInput->nBufferCountActual = INPORT_ACTUAL_BUFFER_COUNT;
    paramPortDefinitionInput->nBufferCountMin = INPORT_MIN_BUFFER_COUNT;
    paramPortDefinitionInput->nBufferSize = INPORT_BUFFER_SIZE;
    paramPortDefinitionInput->format.video.cMIMEType = (OMX_STRING)VP9_MIME_TYPE;
    paramPortDefinitionInput->format.video.eCompressionFormat = OMX_VIDEO_CodingVP9;
    return OMX_ErrorNone;
}

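// Loads the proprietary libDecoderVP9Hybrid.so at runtime, resolves its entry
// points, and initializes the decoder with the output buffer geometry:
// fixed 1920x1080 buffers in raw-data mode, or the gralloc/metadata buffer
// layout negotiated on the output port in graphic-buffer mode.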
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorInit(void) {
    uint32_t buff[MAX_GRAPHIC_BUFFER_NUM];
    uint32_t i, bufferCount;
    bool gralloc_mode = (mWorkingMode == GRAPHICBUFFER_MODE);
    uint32_t bufferSize, bufferHStride, bufferHeight, bufferVStride, bufferWidth;
    if (!gralloc_mode) {
        bufferHStride = 1920;
        bufferVStride = 1088;
        bufferWidth = 1920;
        bufferHeight = 1080;
        bufferCount = 12;
    } else {
        if (mAPMode == METADATA_MODE) {
            const OMX_PARAM_PORTDEFINITIONTYPE *def_output = this->ports[OUTPORT_INDEX]->GetPortDefinition();
            if (def_output == NULL) {
                return OMX_ErrorBadParameter;
            }
            bufferCount = mMetaDataBuffersNum = def_output->nBufferCountActual;
            mOMXBufferHeaderTypePtrNum = 0;

            mGraphicBufferParam.graphicBufferColorFormat = def_output->format.video.eColorFormat;
            mGraphicBufferParam.graphicBufferHStride = (def_output->format.video.nFrameWidth + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN;
            mGraphicBufferParam.graphicBufferVStride = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
            mGraphicBufferParam.graphicBufferWidth = def_output->format.video.nFrameWidth;
            mGraphicBufferParam.graphicBufferHeight = def_output->format.video.nFrameHeight;
            mDecodedImageWidth = def_output->format.video.nFrameWidth;
            mDecodedImageHeight = def_output->format.video.nFrameHeight;
        } else {
            bufferCount = mOMXBufferHeaderTypePtrNum;

            for (i = 0; i < bufferCount; i++) {
                OMX_BUFFERHEADERTYPE *buf_hdr = mOMXBufferHeaderTypePtrArray[i];
                buff[i] = (uint32_t)(buf_hdr->pBuffer);
            }
        }

        bufferHStride = mGraphicBufferParam.graphicBufferHStride;
        bufferVStride = mGraphicBufferParam.graphicBufferVStride;
        bufferWidth = mGraphicBufferParam.graphicBufferWidth;
        bufferHeight = mGraphicBufferParam.graphicBufferHeight;
    }

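    // 4:2:0 layout: one full-size luma plane plus two quarter-size chroma
    // planes, i.e. 1.5 bytes per pixel of horizontal stride * vertical stride.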
    bufferSize = bufferHStride * bufferVStride * 1.5;

    mLibHandle = dlopen("libDecoderVP9Hybrid.so", RTLD_NOW);
    if (mLibHandle == NULL) {
        LOGE("dlopen libDecoderVP9Hybrid.so fail\n");
        return OMX_ErrorBadParameter;
    } else {
        LOGI("dlopen libDecoderVP9Hybrid.so successfully\n");
    }
    mOpenDecoder = (OpenFunc)dlsym(mLibHandle, "Decoder_Open");
    mCloseDecoder = (CloseFunc)dlsym(mLibHandle, "Decoder_Close");
    mInitDecoder = (InitFunc)dlsym(mLibHandle, "Decoder_Init");
    mSingalRenderDone = (SingalRenderDoneFunc)dlsym(mLibHandle, "Decoder_SingalRenderDone");
    mDecoderDecode = (DecodeFunc)dlsym(mLibHandle, "Decoder_Decode");
    mCheckBufferAvailable = (IsBufferAvailableFunc)dlsym(mLibHandle, "Decoder_IsBufferAvailable");
    mGetOutput = (GetOutputFunc)dlsym(mLibHandle, "Decoder_GetOutput");
    mGetRawDataOutput = (GetRawDataOutputFunc)dlsym(mLibHandle, "Decoder_GetRawDataOutput");
    mGetFrameResolution = (GetFrameResolutionFunc)dlsym(mLibHandle, "Decoder_GetFrameResolution");
    mDeinitDecoder = (DeinitFunc)dlsym(mLibHandle, "Decoder_Deinit");
    if (mOpenDecoder == NULL || mCloseDecoder == NULL
        || mInitDecoder == NULL || mSingalRenderDone == NULL
        || mDecoderDecode == NULL || mCheckBufferAvailable == NULL
        || mGetOutput == NULL || mGetRawDataOutput == NULL
        || mGetFrameResolution == NULL || mDeinitDecoder == NULL) {
        return OMX_ErrorBadParameter;
    }

    if (mOpenDecoder(&mCtx, &mHybridCtx) == false) {
        LOGE("open hybrid Decoder fail\n");
        return OMX_ErrorBadParameter;
    }

    // FIXME: The proprietary part of the vp9hybrid decoder should be updated
    //        to take VStride as well as Height. For now it's convenient to
    //        use VStride as that was effectively what was done before.
    mInitDecoder(mHybridCtx, bufferSize, bufferHStride, bufferWidth,
                 bufferHeight, bufferCount, gralloc_mode, buff, (uint32_t)mAPMode);
    return OMX_ErrorNone;
}

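// Re-initializes the decoder after a resolution change: recomputes the buffer
// geometry from the current port definition (or from the decoded image size in
// raw-data mode) and calls Decoder_Init again on the already-loaded library.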
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorReset(void)
{
    uint32_t buff[MAX_GRAPHIC_BUFFER_NUM];
    uint32_t i, bufferCount;
    bool gralloc_mode = (mWorkingMode == GRAPHICBUFFER_MODE);
    uint32_t bufferSize, bufferHStride, bufferHeight, bufferVStride, bufferWidth;
    if (!gralloc_mode) {
        bufferHStride = mDecodedImageWidth;
        bufferVStride = mDecodedImageHeight;
        bufferWidth = mDecodedImageWidth;
        bufferHeight = mDecodedImageHeight;
        bufferSize = bufferHStride * bufferVStride * 1.5;
        bufferCount = 12;
    } else {
        if (mAPMode == METADATA_MODE) {
            const OMX_PARAM_PORTDEFINITIONTYPE *def_output = this->ports[OUTPORT_INDEX]->GetPortDefinition();
            if (def_output == NULL) {
                return OMX_ErrorBadParameter;
            }
            bufferCount = mMetaDataBuffersNum = def_output->nBufferCountActual;
            mOMXBufferHeaderTypePtrNum = 0;

            mGraphicBufferParam.graphicBufferColorFormat = def_output->format.video.eColorFormat;
            mGraphicBufferParam.graphicBufferHStride = (def_output->format.video.nFrameWidth + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN;
            mGraphicBufferParam.graphicBufferVStride = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
            mGraphicBufferParam.graphicBufferWidth = def_output->format.video.nFrameWidth;
            mGraphicBufferParam.graphicBufferHeight = def_output->format.video.nFrameHeight;
        } else {
            bufferCount = mOMXBufferHeaderTypePtrNum;

            for (i = 0; i < bufferCount; i++) {
                OMX_BUFFERHEADERTYPE *buf_hdr = mOMXBufferHeaderTypePtrArray[i];
                buff[i] = (uint32_t)(buf_hdr->pBuffer);
            }
        }
        bufferHStride = mGraphicBufferParam.graphicBufferHStride;
        bufferVStride = mGraphicBufferParam.graphicBufferVStride;
        bufferWidth = mGraphicBufferParam.graphicBufferWidth;
        bufferHeight = mGraphicBufferParam.graphicBufferHeight;
    }

    bufferSize = bufferHStride * bufferVStride * 1.5;

    // FIXME: The proprietary part of the vp9hybrid decoder should be updated
    //        to take VStride as well as Height. For now it's convenient to
    //        use VStride as that was effectively what was done before.
    mInitDecoder(mHybridCtx, bufferSize, bufferHStride, bufferWidth,
                 bufferHeight, bufferCount, gralloc_mode, buff, (uint32_t)mAPMode);
    mFormatChanged = false;
    return OMX_ErrorNone;
}

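// Peeks at the frame header of the next input buffer and reports whether the
// coded resolution no longer fits the allocated output buffers (legacy mode)
// or differs from the current decoded size (metadata mode). When it does, the
// new dimensions are latched into mDecodedImageNewWidth/Height.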
bool OMXVideoDecoderVP9Hybrid::isReallocateNeeded(const uint8_t *data, uint32_t data_sz)
{
    bool gralloc_mode = (mWorkingMode == GRAPHICBUFFER_MODE);
    uint32_t width, height;
    bool ret = true;
    if (gralloc_mode) {
        ret = mGetFrameResolution(data, data_sz, &width, &height);
        if (width == 0 || height == 0)
            return false;

        if (ret) {
            if (mAPMode == METADATA_MODE) {
                ret = (width != mDecodedImageWidth)
                    || (height != mDecodedImageHeight);
            } else {
                ret = width > mGraphicBufferParam.graphicBufferWidth
                    || height > mGraphicBufferParam.graphicBufferHeight;
            }
            if (ret) {
                mDecodedImageNewWidth = width;
                mDecodedImageNewHeight = height;
                return true;
            }
        }
    }

    return ret;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorDeinit(void) {
    mCloseDecoder(mCtx, mHybridCtx);
    mOMXBufferHeaderTypePtrNum = 0;
    if (mLibHandle != NULL) {
        dlclose(mLibHandle);
        mLibHandle = NULL;
    }
    return OMX_ErrorNone;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorStop(void) {
    return OMXComponentCodecBase::ProcessorStop();
}

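// Flushing the input port (or all ports) drains the decoder by feeding an
// empty EOS buffer and discarding whatever output frame it produces.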
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorFlush(OMX_U32 portIndex) {
    if (portIndex == INPORT_INDEX || portIndex == OMX_ALL) {
        // end the last frame
        unsigned int width, height;
        mDecoderDecode(mCtx, mHybridCtx, NULL, 0, true);
        mGetOutput(mCtx, mHybridCtx, &width, &height);
    }
    return OMX_ErrorNone;
}

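// Called when the client returns an output buffer. Tells the hybrid decoder
// that the corresponding graphic buffer has been rendered and may be reused;
// in metadata mode the buffer header is also registered on first sight.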
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorPreFillBuffer(OMX_BUFFERHEADERTYPE *buffer) {
    if (buffer->nOutputPortIndex == OUTPORT_INDEX) {
        unsigned int handle;
        if (mAPMode == METADATA_MODE) {
            bool found = false;
            if (mOMXBufferHeaderTypePtrNum < mMetaDataBuffersNum) {
                for (uint32_t i = 0; i < mOMXBufferHeaderTypePtrNum; i++) {
                    if (mOMXBufferHeaderTypePtrArray[i] == buffer) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    mOMXBufferHeaderTypePtrArray[mOMXBufferHeaderTypePtrNum] = buffer;
                    mOMXBufferHeaderTypePtrNum++;
                }
            } else {
                found = true;
            }

            android::VideoGrallocMetadata *metadata = (android::VideoGrallocMetadata *)(buffer->pBuffer);
            handle = (unsigned int)metadata->pHandle;
            mSingalRenderDone(mHybridCtx, handle, !found);
        } else {
            handle = (unsigned int)buffer->pBuffer;
            mSingalRenderDone(mHybridCtx, handle, false);
        }
    }
    return OMX_ErrorNone;
}

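// Decodes one input buffer. Handles mid-stream resolution changes (draining
// the previous frame and reporting a port settings change before the new size
// takes effect) and then hands a decoded frame to FillRenderBuffer.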
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::ProcessorProcess(
    OMX_BUFFERHEADERTYPE ***pBuffers,
    buffer_retain_t *retains,
    OMX_U32)
{
    OMX_ERRORTYPE ret;
    OMX_BUFFERHEADERTYPE *inBuffer = *pBuffers[INPORT_INDEX];
    OMX_BUFFERHEADERTYPE *outBuffer = *pBuffers[OUTPORT_INDEX];
    OMX_BOOL isResolutionChange = OMX_FALSE;
    bool eos = (inBuffer->nFlags & OMX_BUFFERFLAG_EOS) ? true : false;
    eos = eos && (inBuffer->nFilledLen == 0);
    static unsigned char *firstFrame = NULL;
    static uint32_t firstFrameSize = 0;

    if (inBuffer->pBuffer == NULL) {
        LOGE("Buffer to decode is empty.");
        return OMX_ErrorBadParameter;
    }

    if (inBuffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
        LOGI("Buffer has OMX_BUFFERFLAG_CODECCONFIG flag.");
    }

    if (inBuffer->nFlags & OMX_BUFFERFLAG_DECODEONLY) {
        LOGW("Buffer has OMX_BUFFERFLAG_DECODEONLY flag.");
    }

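    // Cache the first timestamped frame so it can be used to drain the decoder
    // when a resolution change is detected later in the stream.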
    if (firstFrameSize == 0 && inBuffer->nFilledLen != 0 && inBuffer->nTimeStamp != 0) {
        if (firstFrame != NULL) {
            free(firstFrame);
            firstFrame = NULL;
        }

        firstFrame = (unsigned char *)malloc(inBuffer->nFilledLen);
        memcpy(firstFrame, inBuffer->pBuffer + inBuffer->nOffset, inBuffer->nFilledLen);
        firstFrameSize = inBuffer->nFilledLen;
    }

    if ((mWorkingMode == GRAPHICBUFFER_MODE) && (mAPMode == METADATA_MODE) && (!mFormatChanged)) {
        bool mRet = mGetFrameResolution(inBuffer->pBuffer + inBuffer->nOffset, inBuffer->nFilledLen,
            &mDecodedImageNewWidth, &mDecodedImageNewHeight);

        if (mRet && ((mDecodedImageNewWidth != 0) && (mDecodedImageNewHeight != 0)) &&
            ((mDecodedImageWidth != 0) && (mDecodedImageHeight != 0)) &&
            ((mDecodedImageNewWidth != mDecodedImageWidth || mDecodedImageNewHeight != mDecodedImageHeight))) {
            if (mLastTimeStamp == 0) {
                retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
                HandleFormatChange();
                return OMX_ErrorNone;
            } else {
                // Detected format change in time.
                // Drain the last frame and keep the current input buffer.
                mDecoderDecode(mCtx, mHybridCtx, firstFrame, firstFrameSize, false);
                retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;

                mFormatChanged = true;

                ret = FillRenderBuffer(pBuffers[OUTPORT_INDEX], &retains[OUTPORT_INDEX],
                    eos ? OMX_BUFFERFLAG_EOS : 0, &isResolutionChange);

                if (ret == OMX_ErrorNone)
                    (*pBuffers[OUTPORT_INDEX])->nTimeStamp = mLastTimeStamp;

                mLastTimeStamp = inBuffer->nTimeStamp;

                free(firstFrame);
                firstFrame = NULL;
                firstFrameSize = 0;
                return ret;
            }
        } else if (!mRet && (mDecodedImageNewWidth == 0 || mDecodedImageNewHeight == 0)) {
            retains[INPORT_INDEX] = BUFFER_RETAIN_NOT_RETAIN;
            return OMX_ErrorBadParameter;
        }
    }

#if LOG_TIME == 1
    struct timeval tv_start, tv_end;
    int32_t time_ms;
    gettimeofday(&tv_start, NULL);
#endif
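    // Return codes from Decoder_Decode, as handled below: 0 = success,
    // -2 = resolution changed (output buffers may need reallocation),
    // -3 = frame skipped (no output produced).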
    int res = mDecoderDecode(mCtx, mHybridCtx, inBuffer->pBuffer + inBuffer->nOffset, inBuffer->nFilledLen, eos);
    if (res != 0) {
        if (res == -2) {
            if (isReallocateNeeded(inBuffer->pBuffer + inBuffer->nOffset, inBuffer->nFilledLen)) {
                if (mAPMode == METADATA_MODE) {
                    mFormatChanged = true;
                } else {
                    retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
                    HandleFormatChange();
                    return OMX_ErrorNone;
                }
            }
            // Drain the last frame and keep the current input buffer.
            res = mDecoderDecode(mCtx, mHybridCtx, NULL, 0, true);
            retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        } else if (res == -3) {
            LOGW("on2 decoder skipped decoding the frame.");
            (*pBuffers[OUTPORT_INDEX])->nOffset = 0;
            (*pBuffers[OUTPORT_INDEX])->nFilledLen = 0;
            return OMX_ErrorNone;
        } else {
            LOGE("on2 decoder failed to decode frame.");
            return OMX_ErrorBadParameter;
        }
    }

#if LOG_TIME == 1
    gettimeofday(&tv_end, NULL);
    time_ms = (int32_t)(tv_end.tv_sec - tv_start.tv_sec) * 1000 + (int32_t)(tv_end.tv_usec - tv_start.tv_usec) / 1000;
    LOGI("vpx_codec_decode: %d ms", time_ms);
#endif

    ret = FillRenderBuffer(pBuffers[OUTPORT_INDEX],
                           &retains[OUTPORT_INDEX],
                           eos ? OMX_BUFFERFLAG_EOS : 0,
                           &isResolutionChange);

    if (ret == OMX_ErrorNone) {
        (*pBuffers[OUTPORT_INDEX])->nTimeStamp = mLastTimeStamp;
    }
    mLastTimeStamp = inBuffer->nTimeStamp;

    if (isResolutionChange == OMX_TRUE) {
        HandleFormatChange();
    }
    bool inputEoS = ((*pBuffers[INPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS);
    bool outputEoS = ((*pBuffers[OUTPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS);
    // If the output port has not reached EOS yet, retain the input buffer
    // until all the output buffers are drained.
    if (inputEoS && !outputEoS && retains[INPORT_INDEX] != BUFFER_RETAIN_GETAGAIN) {
        retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        // The input buffer is retained only for draining purposes.
        // Set nFilledLen to 0 so the buffer will not be decoded again.
        (*pBuffers[INPORT_INDEX])->nFilledLen = 0;
    }

    if (ret == OMX_ErrorNotReady) {
        retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        ret = OMX_ErrorNone;
    }

    return ret;
}

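// Retrieves a decoded frame from the hybrid decoder. In raw-data mode the
// frame is copied into the client's buffer; in graphic-buffer mode the output
// buffer header that owns the returned frame-buffer index is handed back to
// the client (BUFFER_RETAIN_OVERRIDDEN when it differs from the one passed in).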
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::FillRenderBuffer(OMX_BUFFERHEADERTYPE **pBuffer,
                                                         buffer_retain_t *retain,
                                                         OMX_U32 inportBufferFlags,
                                                         OMX_BOOL *isResolutionChange)
{
    OMX_BUFFERHEADERTYPE *buffer = *pBuffer;
    OMX_BUFFERHEADERTYPE *buffer_orign = buffer;

    OMX_ERRORTYPE ret = OMX_ErrorNone;

    int fb_index;
    if (mWorkingMode == RAWDATA_MODE) {
        const OMX_PARAM_PORTDEFINITIONTYPE *paramPortDefinitionOutput
            = this->ports[OUTPORT_INDEX]->GetPortDefinition();
        int32_t stride = paramPortDefinitionOutput->format.video.nStride;
        int32_t height = paramPortDefinitionOutput->format.video.nFrameHeight;
        int32_t width = paramPortDefinitionOutput->format.video.nFrameWidth;
        unsigned char *dst = buffer->pBuffer;
        fb_index = mGetRawDataOutput(mCtx, mHybridCtx, dst, height, stride);
        if (fb_index == -1) {
            if (inportBufferFlags & OMX_BUFFERFLAG_EOS) {
                // The EOS frame is a non-shown frame.
                buffer->nFlags = OMX_BUFFERFLAG_EOS;
                buffer->nOffset = 0;
                buffer->nFilledLen = 0;
                return OMX_ErrorNone;
            }
            LOGV("vpx_codec_get_frame return NULL.");
            return OMX_ErrorNotReady;
        }
        buffer->nOffset = 0;
        buffer->nFilledLen = stride * height * 3 / 2;
        if (inportBufferFlags & OMX_BUFFERFLAG_EOS) {
            buffer->nFlags = OMX_BUFFERFLAG_EOS;
        }
        return OMX_ErrorNone;
    }

    if (mFormatChanged && mAPMode == METADATA_MODE) {
        fb_index = mGetOutput(mCtx, mHybridCtx, &mDecodedImageWidth, &mDecodedImageHeight);
    } else {
        fb_index = mGetOutput(mCtx, mHybridCtx, &mDecodedImageNewWidth, &mDecodedImageNewHeight);
    }
    if (fb_index == -1) {
        if (mFormatChanged && mAPMode == METADATA_MODE) {
            *isResolutionChange = OMX_TRUE;
            return OMX_ErrorNone;
        }

        if (inportBufferFlags & OMX_BUFFERFLAG_EOS) {
            // The EOS frame is a non-shown frame.
            buffer->nFlags = OMX_BUFFERFLAG_EOS;
            buffer->nOffset = 0;
            buffer->nFilledLen = 0;
            return OMX_ErrorNone;
        }
        LOGV("vpx_codec_get_frame return NULL.");
        return OMX_ErrorNotReady;
    }
    if (mDecodedImageHeight == 0 && mDecodedImageWidth == 0) {
        mDecodedImageWidth = mDecodedImageNewWidth;
        mDecodedImageHeight = mDecodedImageNewHeight;
        if (mAPMode == LEGACY_MODE)
            *isResolutionChange = OMX_TRUE;
    }

    if (mAPMode == LEGACY_MODE) {
        if ((mDecodedImageNewWidth != mDecodedImageWidth)
            || (mDecodedImageNewHeight != mDecodedImageHeight))
            *isResolutionChange = OMX_TRUE;
    } else {
        if (mFormatChanged && ((mDecodedImageNewWidth != mDecodedImageWidth)
            || (mDecodedImageNewHeight != mDecodedImageHeight)))
            *isResolutionChange = OMX_TRUE;
    }
    buffer = *pBuffer = mOMXBufferHeaderTypePtrArray[fb_index];
    buffer->nOffset = 0;
    buffer->nFilledLen = sizeof(OMX_U8*);
    if (inportBufferFlags & OMX_BUFFERFLAG_EOS) {
        buffer->nFlags = OMX_BUFFERFLAG_EOS;
    }

    if (buffer_orign != buffer) {
        *retain = BUFFER_RETAIN_OVERRIDDEN;
    }

    ret = OMX_ErrorNone;

    return ret;
}

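// Buffer preparation is not needed for the hybrid decoder path; ProcessorProcess
// feeds the bitstream to the library directly, so these base-class hooks are no-ops.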
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::PrepareConfigBuffer(VideoConfigBuffer *) {
    return OMX_ErrorNone;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::PrepareDecodeBuffer(OMX_BUFFERHEADERTYPE *,
                                                            buffer_retain_t *,
                                                            VideoDecodeBuffer *) {
    return OMX_ErrorNone;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::BuildHandlerList(void) {
    OMXVideoDecoderBase::BuildHandlerList();
    return OMX_ErrorNone;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::GetParamVideoVp9(OMX_PTR) {
    return OMX_ErrorNone;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::SetParamVideoVp9(OMX_PTR) {
    return OMX_ErrorNone;
}

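// Applies a detected resolution change: updates both port definitions,
// disables the output port, tears down the decoder's buffer state, and
// notifies the client so output buffers can be renegotiated.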
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::HandleFormatChange(void)
{
    ALOGE("handle format change from %dx%d to %dx%d",
        mDecodedImageWidth, mDecodedImageHeight, mDecodedImageNewWidth, mDecodedImageNewHeight);
    mDecodedImageWidth = mDecodedImageNewWidth;
    mDecodedImageHeight = mDecodedImageNewHeight;
    // Sync port definitions as they may change.
    OMX_PARAM_PORTDEFINITIONTYPE paramPortDefinitionInput, paramPortDefinitionOutput;

    memcpy(&paramPortDefinitionInput,
        this->ports[INPORT_INDEX]->GetPortDefinition(),
        sizeof(paramPortDefinitionInput));

    memcpy(&paramPortDefinitionOutput,
        this->ports[OUTPORT_INDEX]->GetPortDefinition(),
        sizeof(paramPortDefinitionOutput));

    unsigned int width = mDecodedImageWidth;
    unsigned int height = mDecodedImageHeight;
    unsigned int stride = mDecodedImageWidth;
    unsigned int sliceHeight = mDecodedImageHeight;

    unsigned int widthCropped = mDecodedImageWidth;
    unsigned int heightCropped = mDecodedImageHeight;
    unsigned int strideCropped = widthCropped;
    unsigned int sliceHeightCropped = heightCropped;

    if (widthCropped == paramPortDefinitionOutput.format.video.nFrameWidth &&
        heightCropped == paramPortDefinitionOutput.format.video.nFrameHeight) {
        if (mWorkingMode == RAWDATA_MODE) {
            LOGW("Port settings change is not reported as the size has not changed.");
            return OMX_ErrorNone;
        }
    }

    if (mAPMode == METADATA_MODE) {
        paramPortDefinitionOutput.nBufferCountActual = mNativeBufferCount;
        paramPortDefinitionOutput.nBufferCountMin = mNativeBufferCount - 4;
    }
    paramPortDefinitionInput.format.video.nFrameWidth = width;
    paramPortDefinitionInput.format.video.nFrameHeight = height;
    paramPortDefinitionInput.format.video.nStride = stride;
    paramPortDefinitionInput.format.video.nSliceHeight = sliceHeight;

    if (mWorkingMode == RAWDATA_MODE) {
        paramPortDefinitionOutput.format.video.nFrameWidth = widthCropped;
        paramPortDefinitionOutput.format.video.nFrameHeight = heightCropped;
        paramPortDefinitionOutput.format.video.nStride = strideCropped;
        paramPortDefinitionOutput.format.video.nSliceHeight = sliceHeightCropped;
    } else if (mWorkingMode == GRAPHICBUFFER_MODE) {
        // When the width and height parsed from the ES are not larger than the
        // graphic buffers allocated on the output port, there is no need to
        // reallocate them; just report the crop info to the OMX client.
        if (mAPMode == LEGACY_MODE &&
            width <= mGraphicBufferParam.graphicBufferWidth &&
            height <= mGraphicBufferParam.graphicBufferHeight) {
            this->ports[INPORT_INDEX]->SetPortDefinition(&paramPortDefinitionInput, true);
            this->ports[OUTPORT_INDEX]->ReportOutputCrop();
            return OMX_ErrorNone;
        }

        if (mAPMode == METADATA_MODE ||
            width > mGraphicBufferParam.graphicBufferWidth ||
            height > mGraphicBufferParam.graphicBufferHeight) {
            // Report the real decoded resolution (rather than the display
            // resolution) to the output port so the graphic buffers can be
            // reallocated when the ES resolution exceeds what was allocated.
            paramPortDefinitionOutput.format.video.nFrameWidth = width;
            paramPortDefinitionOutput.format.video.nFrameHeight = height;
            paramPortDefinitionOutput.format.video.eColorFormat = GetOutputColorFormat(
                    paramPortDefinitionOutput.format.video.nFrameWidth);
            paramPortDefinitionOutput.format.video.nStride = stride;
            paramPortDefinitionOutput.format.video.nSliceHeight = sliceHeight;
        }
    }

    paramPortDefinitionOutput.bEnabled = (OMX_BOOL)false;
    mOMXBufferHeaderTypePtrNum = 0;
    mMetaDataBuffersNum = 0;
    memset(&mGraphicBufferParam, 0, sizeof(mGraphicBufferParam));
    mDeinitDecoder(mHybridCtx);

    this->ports[INPORT_INDEX]->SetPortDefinition(&paramPortDefinitionInput, true);
    this->ports[OUTPORT_INDEX]->SetPortDefinition(&paramPortDefinitionOutput, true);

    this->ports[OUTPORT_INDEX]->ReportPortSettingsChanged();
    return OMX_ErrorNone;
}

OMX_COLOR_FORMATTYPE OMXVideoDecoderVP9Hybrid::GetOutputColorFormat(int) {
    LOGV("Output color format is HAL_PIXEL_FORMAT_INTEL_YV12.");
    return (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_INTEL_YV12;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::GetDecoderOutputCropSpecific(OMX_PTR pStructure) {
    OMX_ERRORTYPE ret = OMX_ErrorNone;
    OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)pStructure;

    CHECK_TYPE_HEADER(rectParams);

    if (rectParams->nPortIndex != OUTPORT_INDEX) {
        return OMX_ErrorUndefined;
    }

    const OMX_PARAM_PORTDEFINITIONTYPE *paramPortDefinitionInput
        = this->ports[INPORT_INDEX]->GetPortDefinition();

    rectParams->nLeft = VPX_DECODE_BORDER;
    rectParams->nTop = VPX_DECODE_BORDER;
    rectParams->nWidth = paramPortDefinitionInput->format.video.nFrameWidth;
    rectParams->nHeight = paramPortDefinitionInput->format.video.nFrameHeight;

    return ret;
}

OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::GetNativeBufferUsageSpecific(OMX_PTR pStructure) {
    OMX_ERRORTYPE ret;
    android::GetAndroidNativeBufferUsageParams *param =
        (android::GetAndroidNativeBufferUsageParams*)pStructure;
    CHECK_TYPE_HEADER(param);

    param->nUsage |= (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN
        | GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_EXTERNAL_DISP);
    return OMX_ErrorNone;
}

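// Switches the component between raw-data output and native graphic buffers.
// When native buffers are enabled, the output port is reconfigured for the
// Intel YV12 format and the frame dimensions are padded with the libvpx
// decode border on each side.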
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::SetNativeBufferModeSpecific(OMX_PTR pStructure) {
    OMX_ERRORTYPE ret;
    android::EnableAndroidNativeBuffersParams *param =
        (android::EnableAndroidNativeBuffersParams*)pStructure;

    CHECK_TYPE_HEADER(param);
    CHECK_PORT_INDEX_RANGE(param);
    CHECK_SET_PARAM_STATE();

    PortVideo *port = NULL;
    port = static_cast<PortVideo *>(this->ports[OUTPORT_INDEX]);
    OMX_PARAM_PORTDEFINITIONTYPE port_def;
    memcpy(&port_def, port->GetPortDefinition(), sizeof(port_def));

    if (!param->enable) {
        mWorkingMode = RAWDATA_MODE;
        LOGI("Raw data mode is used");
        // If this is a fallback from native mode, the color format has already
        // been set to the Intel format, so restore the default color format
        // and clear the native render/window handles.
        port_def.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
        port_def.format.video.pNativeRender = NULL;
        port_def.format.video.pNativeWindow = NULL;
        port->SetPortDefinition(&port_def, true);
        return OMX_ErrorNone;
    }

    mWorkingMode = GRAPHICBUFFER_MODE;
    port_def.nBufferCountMin = mNativeBufferCount - 4;
    port_def.nBufferCountActual = mNativeBufferCount;
    port_def.format.video.cMIMEType = (OMX_STRING)VA_VED_RAW_MIME_TYPE;
    // Add borders on each side as required by libvpx decoding.
    port_def.format.video.nFrameWidth += VPX_DECODE_BORDER * 2;
    port_def.format.video.nFrameHeight += VPX_DECODE_BORDER * 2;
    mDecodedImageWidth = port_def.format.video.nFrameWidth;
    mDecodedImageHeight = port_def.format.video.nFrameHeight;
    port_def.format.video.eColorFormat = GetOutputColorFormat(port_def.format.video.nFrameWidth);
    port->SetPortDefinition(&port_def, true);

    return OMX_ErrorNone;
}

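// In addition to the base-class buffer accounting, the output port must be
// enabled and the hybrid decoder must have a free frame buffer before this
// component reports that it is ready to process another buffer.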
bool OMXVideoDecoderVP9Hybrid::IsAllBufferAvailable(void) {
    bool b = ComponentBase::IsAllBufferAvailable();
    if (b == false) {
        return false;
    }

    PortVideo *port = NULL;
    port = static_cast<PortVideo *>(this->ports[OUTPORT_INDEX]);
    const OMX_PARAM_PORTDEFINITIONTYPE *port_def = port->GetPortDefinition();
    // If the output port is disabled, retain the input buffer.
    if (!port_def->bEnabled) {
        return false;
    }
    return mCheckBufferAvailable(mHybridCtx);
}

DECLARE_OMX_COMPONENT("OMX.Intel.VideoDecoder.VP9.hybrid", "video_decoder.vp9", OMXVideoDecoderVP9Hybrid);