1 /*
2 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "VideoDecoderBase.h"
18 #include "VideoDecoderTrace.h"
19 #include <string.h>
20 #include <va/va_android.h>
21 #include <va/va_tpi.h>
22 #ifdef __SSE4_1__
23 #include "use_util_sse4.h"
24 #endif
25
26 #define INVALID_PTS ((uint64_t)-1)
27 #define MAXIMUM_POC 0x7FFFFFFF
28 #define MINIMUM_POC 0x80000000
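// POC sentinels: pictureOrder and mNextOutputPOC are int32_t values, so
// MAXIMUM_POC is the largest positive POC and MINIMUM_POC (0x80000000, i.e.
// INT32_MIN) acts as "no lower bound" when assigned to mNextOutputPOC.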
29 #define ANDROID_DISPLAY_HANDLE 0x18C34078
30
VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
32 : mInitialized(false),
33 mLowDelay(false),
34 mStoreMetaData(false),
35 mDisplay(NULL),
36 mVADisplay(NULL),
37 mVAContext(VA_INVALID_ID),
38 mVAConfig(VA_INVALID_ID),
39 mVAStarted(false),
40 mCurrentPTS(INVALID_PTS),
41 mAcquiredBuffer(NULL),
42 mLastReference(NULL),
43 mForwardReference(NULL),
44 mDecodingFrame(false),
45 mSizeChanged(false),
46 mShowFrame(true),
47 mOutputWindowSize(OUTPUT_WINDOW_SIZE),
48 mRotationDegrees(0),
49 mErrReportEnabled(false),
50 mWiDiOn(false),
51 mRawOutput(false),
52 mManageReference(true),
53 mOutputMethod(OUTPUT_BY_PCT),
54 mNumSurfaces(0),
55 mSurfaceBuffers(NULL),
56 mOutputHead(NULL),
57 mOutputTail(NULL),
58 mSurfaces(NULL),
59 mVASurfaceAttrib(NULL),
60 mSurfaceUserPtr(NULL),
61 mSurfaceAcquirePos(0),
62 mNextOutputPOC(MINIMUM_POC),
63 mParserType(type),
64 mParserHandle(NULL),
65 mSignalBufferSize(0) {
66
67 memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo));
68 memset(&mConfigBuffer, 0, sizeof(mConfigBuffer));
69 for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
70 mSignalBufferPre[i] = NULL;
71 }
72 pthread_mutex_init(&mLock, NULL);
73 pthread_mutex_init(&mFormatLock, NULL);
74 mVideoFormatInfo.mimeType = strdup(mimeType);
75 mUseGEN = false;
76 mMetaDataBuffersNum = 0;
77 mLibHandle = NULL;
78 mParserOpen = NULL;
79 mParserClose = NULL;
80 mParserParse = NULL;
81 mParserQuery = NULL;
82 mParserFlush = NULL;
83 mParserUpdate = NULL;
84 }
85
VideoDecoderBase::~VideoDecoderBase() {
87 pthread_mutex_destroy(&mLock);
88 pthread_mutex_destroy(&mFormatLock);
89 stop();
90 free(mVideoFormatInfo.mimeType);
91 }
92
Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
94 if (buffer == NULL) {
95 return DECODE_INVALID_DATA;
96 }
97
98 if (mParserHandle != NULL) {
99 WTRACE("Decoder has already started.");
100 return DECODE_SUCCESS;
101 }
102 mLibHandle = dlopen("libmixvbp.so", RTLD_NOW);
103 if (mLibHandle == NULL) {
104 return DECODE_NO_PARSER;
105 }
106 mParserOpen = (OpenFunc)dlsym(mLibHandle, "vbp_open");
107 mParserClose = (CloseFunc)dlsym(mLibHandle, "vbp_close");
108 mParserParse = (ParseFunc)dlsym(mLibHandle, "vbp_parse");
109 mParserQuery = (QueryFunc)dlsym(mLibHandle, "vbp_query");
110 mParserFlush = (FlushFunc)dlsym(mLibHandle, "vbp_flush");
111 if (mParserOpen == NULL || mParserClose == NULL || mParserParse == NULL
112 || mParserQuery == NULL || mParserFlush == NULL) {
113 return DECODE_NO_PARSER;
114 }
115 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
116 mParserUpdate = (UpdateFunc)dlsym(mLibHandle, "vbp_update");
117 if (mParserUpdate == NULL) {
118 return DECODE_NO_PARSER;
119 }
120 #endif
121 if ((int32_t)mParserType != VBP_INVALID) {
122 ITRACE("mParserType = %d", mParserType);
123 if (mParserOpen(mParserType, &mParserHandle) != VBP_OK) {
124 ETRACE("Failed to open VBP parser.");
125 return DECODE_NO_PARSER;
126 }
127 }
// keep a copy of the config buffer (metadata only); it can be used to override VA setup parameters.
129 mConfigBuffer = *buffer;
130 mConfigBuffer.data = NULL;
131 mConfigBuffer.size = 0;
132
133 mVideoFormatInfo.width = buffer->width;
134 mVideoFormatInfo.height = buffer->height;
135 if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
136 mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
137 mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
138 }
139 mLowDelay = buffer->flag & WANT_LOW_DELAY;
140 mStoreMetaData = buffer->flag & WANT_STORE_META_DATA;
141 mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
142 if (mRawOutput) {
143 WTRACE("Output is raw data.");
144 }
145
146 return DECODE_SUCCESS;
147 }
148
149
Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
151 if (buffer == NULL) {
152 return DECODE_INVALID_DATA;
153 }
154
155 // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
156 terminateVA();
157
// reset mConfigBuffer so it can be passed to startVA.
159 mConfigBuffer = *buffer;
160 mConfigBuffer.data = NULL;
161 mConfigBuffer.size = 0;
162
163 mVideoFormatInfo.width = buffer->width;
164 mVideoFormatInfo.height = buffer->height;
165 if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
166 mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
167 mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
168 }
169 mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
170 mLowDelay = buffer->flag & WANT_LOW_DELAY;
171 mStoreMetaData = buffer->flag & WANT_STORE_META_DATA;
172 mMetaDataBuffersNum = 0;
173 mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
174 if (mRawOutput) {
175 WTRACE("Output is raw data.");
176 }
177 return DECODE_SUCCESS;
178 }
179
180
181
void VideoDecoderBase::stop(void) {
183 terminateVA();
184
185 mCurrentPTS = INVALID_PTS;
186 mAcquiredBuffer = NULL;
187 mLastReference = NULL;
188 mForwardReference = NULL;
189 mDecodingFrame = false;
190 mSizeChanged = false;
191
192 // private variables
193 mLowDelay = false;
194 mStoreMetaData = false;
195 mRawOutput = false;
196 mNumSurfaces = 0;
197 mSurfaceAcquirePos = 0;
198 mNextOutputPOC = MINIMUM_POC;
199 mVideoFormatInfo.valid = false;
200 if (mParserHandle){
201 mParserClose(mParserHandle);
202 mParserHandle = NULL;
203 }
204 if (mLibHandle) {
205 dlclose(mLibHandle);
206 mLibHandle = NULL;
207 }
208 }
209
void VideoDecoderBase::flush(void) {
211 if (mVAStarted == false) {
212 // nothing to flush at this stage
213 return;
214 }
215
216 endDecodingFrame(true);
217
218 VideoSurfaceBuffer *p = mOutputHead;
// check if there is a buffer with the resolution-change (DRC) flag in the output queue
220 while (p) {
221 if (p->renderBuffer.flag & IS_RESOLUTION_CHANGE) {
222 mSizeChanged = true;
223 break;
224 }
225 p = p->next;
226 }
227 // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing
228 // (surface is still being rendered)
229 mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
230 mNextOutputPOC = MINIMUM_POC;
231 mCurrentPTS = INVALID_PTS;
232 mAcquiredBuffer = NULL;
233 mLastReference = NULL;
234 mForwardReference = NULL;
235 mOutputHead = NULL;
236 mOutputTail = NULL;
237 mDecodingFrame = false;
238
239 // flush vbp parser
240 if (mParserHandle && (mParserFlush(mParserHandle) != VBP_OK)) {
241 WTRACE("Failed to flush parser. Continue");
242 }
243
244 // initialize surface buffer without resetting mapped/raw data
245 initSurfaceBuffer(false);
246
247 }
248
void VideoDecoderBase::freeSurfaceBuffers(void) {
250 if (mVAStarted == false) {
// no surface buffers to free at this stage
252 return;
253 }
254
255 pthread_mutex_lock(&mLock);
256
257 endDecodingFrame(true);
258
259 // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
260 terminateVA();
261
262 pthread_mutex_unlock(&mLock);
263 }
264
const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
266 if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
// Do nothing here except take and release mFormatLock, which blocks this call
// until a concurrent updateFormatInfo() has finished updating mVideoFormatInfo.
269 pthread_mutex_lock(&mFormatLock);
270 pthread_mutex_unlock(&mFormatLock);
271 }
272
273 return &mVideoFormatInfo;
274 }
275
int VideoDecoderBase::getOutputQueueLength(void) {
277 VideoSurfaceBuffer *p = mOutputHead;
278
279 int i = 0;
280 while (p) {
281 p = p->next;
282 i++;
283 }
284
285 return i;
286 }
287
const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) {
289 if (mVAStarted == false) {
290 return NULL;
291 }
292 bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
293
294 if (draining) {
295 // complete decoding the last frame and ignore return
296 endDecodingFrame(false);
297 }
298
299 if (mOutputHead == NULL) {
300 return NULL;
301 }
302
303 // output by position (the first buffer)
304 VideoSurfaceBuffer *outputByPos = mOutputHead;
305
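// In low-delay mode frames are returned in decoding order: the head of the
// output queue is popped and returned right away, with no reordering by
// picture coding type or POC.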
306 if (mLowDelay) {
307 mOutputHead = mOutputHead->next;
308 if (mOutputHead == NULL) {
309 mOutputTail = NULL;
310 }
311 vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp);
312 if (useGraphicBuffer && !mUseGEN) {
313 vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface);
314 fillDecodingErrors(&(outputByPos->renderBuffer));
315 }
316 if (draining && mOutputTail == NULL) {
317 outputByPos->renderBuffer.flag |= IS_EOS;
318 }
319 drainDecodingErrors(outErrBuf, &(outputByPos->renderBuffer));
320
321 return &(outputByPos->renderBuffer);
322 }
323
324 VideoSurfaceBuffer *output = NULL;
325 if (mOutputMethod == OUTPUT_BY_POC) {
326 output = findOutputByPoc(draining);
327 } else if (mOutputMethod == OUTPUT_BY_PCT) {
328 output = findOutputByPct(draining);
329 } else {
330 ETRACE("Invalid output method.");
331 return NULL;
332 }
333
334 if (output == NULL) {
335 return NULL;
336 }
337
338 if (output != outputByPos) {
339 // remove this output from middle or end of the list
340 VideoSurfaceBuffer *p = outputByPos;
341 while (p->next != output) {
342 p = p->next;
343 }
344 p->next = output->next;
345 if (mOutputTail == output) {
346 mOutputTail = p;
347 }
348 } else {
349 // remove this output from head of the list
350 mOutputHead = mOutputHead->next;
351 if (mOutputHead == NULL) {
352 mOutputTail = NULL;
353 }
354 }
355 //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6);
356 vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp);
357
358 if (useGraphicBuffer && !mUseGEN) {
359 vaSyncSurface(mVADisplay, output->renderBuffer.surface);
360 fillDecodingErrors(&(output->renderBuffer));
361 }
362
363 if (draining && mOutputTail == NULL) {
364 output->renderBuffer.flag |= IS_EOS;
365 }
366
367 drainDecodingErrors(outErrBuf, &(output->renderBuffer));
368
369 return &(output->renderBuffer);
370 }
371
VideoSurfaceBuffer* VideoDecoderBase::findOutputByPts() {
373 // output by presentation time stamp - buffer with the smallest time stamp is output
374 VideoSurfaceBuffer *p = mOutputHead;
375 VideoSurfaceBuffer *outputByPts = NULL;
376 uint64_t pts = INVALID_PTS;
377 do {
378 if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) {
379 // find buffer with the smallest PTS
380 pts = p->renderBuffer.timeStamp;
381 outputByPts = p;
382 }
383 p = p->next;
384 } while (p != NULL);
385
386 return outputByPts;
387 }
388
VideoSurfaceBuffer* VideoDecoderBase::findOutputByPct(bool draining) {
390 // output by picture coding type (PCT)
// if there is more than one reference frame, the first reference frame is output; otherwise,
392 // output non-reference frame if there is any.
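// Example: with decode order I P B (display order I B P), the queue holds
// [I, P] after P is decoded and the second reference forces the head (I) out;
// after B is decoded the queue holds [P, B] and the non-reference B is output,
// leaving P to be output when draining.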
393
394 VideoSurfaceBuffer *p = mOutputHead;
395 VideoSurfaceBuffer *outputByPct = NULL;
396 int32_t reference = 0;
397 do {
398 if (p->referenceFrame) {
399 reference++;
400 if (reference > 1) {
401 // mOutputHead must be a reference frame
402 outputByPct = mOutputHead;
403 break;
404 }
405 } else {
406 // first non-reference frame
407 outputByPct = p;
408 break;
409 }
410 p = p->next;
411 } while (p != NULL);
412
413 if (outputByPct == NULL && draining) {
414 outputByPct = mOutputHead;
415 }
416 return outputByPct;
417 }
418
419 #if 0
420 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
421 // output by picture order count (POC)
422 // Output criteria:
423 // if there is IDR frame (POC == 0), all the frames before IDR must be output;
424 // Otherwise, if draining flag is set or list is full, frame with the least POC is output;
425 // Otherwise, NOTHING is output
426
427 int32_t dpbFullness = 0;
428 for (int32_t i = 0; i < mNumSurfaces; i++) {
429 // count num of reference frames
430 if (mSurfaceBuffers[i].asReferernce) {
431 dpbFullness++;
432 }
433 }
434
435 if (mAcquiredBuffer && mAcquiredBuffer->asReferernce) {
436 // frame is being decoded and is not ready for output yet
437 dpbFullness--;
438 }
439
440 VideoSurfaceBuffer *p = mOutputHead;
441 while (p != NULL) {
442 // count dpbFullness with non-reference frame in the output queue
443 if (p->asReferernce == false) {
444 dpbFullness++;
445 }
446 p = p->next;
447 }
448
449 Retry:
450 p = mOutputHead;
451 VideoSurfaceBuffer *outputByPoc = NULL;
452 int32_t count = 0;
453 int32_t poc = MAXIMUM_POC;
454
455 do {
456 if (p->pictureOrder == 0) {
457 // output picture with the least POC before IDR
458 if (outputByPoc != NULL) {
459 mNextOutputPOC = outputByPoc->pictureOrder + 1;
460 return outputByPoc;
461 } else {
462 mNextOutputPOC = MINIMUM_POC;
463 }
464 }
465
466 // POC of the output candidate must not be less than mNextOutputPOC
467 if (p->pictureOrder < mNextOutputPOC) {
468 break;
469 }
470
471 if (p->pictureOrder < poc) {
472 // update the least POC.
473 poc = p->pictureOrder;
474 outputByPoc = p;
475 }
476 count++;
477 p = p->next;
478 } while (p != NULL && count < mOutputWindowSize);
479
480 if (draining == false && dpbFullness < mOutputWindowSize) {
481 // list is not full and we are not in draining state
482 // if DPB is already full, one frame must be output
483 return NULL;
484 }
485
486 if (outputByPoc == NULL) {
487 mNextOutputPOC = MINIMUM_POC;
488 goto Retry;
489 }
490
491 // for debugging purpose
492 if (outputByPoc->pictureOrder != 0 && outputByPoc->pictureOrder < mNextOutputPOC) {
493 ETRACE("Output POC is not incremental, expected %d, actual %d", mNextOutputPOC, outputByPoc->pictureOrder);
494 //gaps_in_frame_num_value_allowed_flag is not currently supported
495 }
496
497 mNextOutputPOC = outputByPoc->pictureOrder + 1;
498
499 return outputByPoc;
500 }
501 #else
VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
503 VideoSurfaceBuffer *output = NULL;
504 VideoSurfaceBuffer *p = mOutputHead;
505 int32_t count = 0;
506 int32_t poc = MAXIMUM_POC;
507 VideoSurfaceBuffer *outputleastpoc = mOutputHead;
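// Scan up to mOutputWindowSize queued buffers for the one with the smallest
// POC that is not below mNextOutputPOC. A buffer with POC 0 (new IDR) forces
// everything collected before it to be output first. If the window fills
// without a candidate, drop the lower bound (mNextOutputPOC = MINIMUM_POC)
// and rescan; when draining, fall back to the least-POC buffer seen.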
508 do {
509 count++;
510 if (p->pictureOrder == 0) {
511 // any picture before this POC (new IDR) must be output
512 if (output == NULL) {
513 mNextOutputPOC = MINIMUM_POC;
514 // looking for any POC with negative value
515 } else {
516 mNextOutputPOC = output->pictureOrder + 1;
517 break;
518 }
519 }
520 if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) {
// this POC meets the output criteria.
522 poc = p->pictureOrder;
523 output = p;
524 outputleastpoc = p;
525 }
526 if (poc == mNextOutputPOC || count == mOutputWindowSize) {
527 if (output != NULL) {
528 // this indicates two cases:
529 // 1) the next output POC is found.
530 // 2) output queue is full and there is at least one buffer meeting the output criteria.
531 mNextOutputPOC = output->pictureOrder + 1;
532 break;
533 } else {
534 // this indicates output queue is full and no buffer in the queue meets the output criteria
535 // restart processing as queue is FULL and output criteria is changed. (next output POC is 0)
536 mNextOutputPOC = MINIMUM_POC;
537 count = 0;
538 poc = MAXIMUM_POC;
539 p = mOutputHead;
540 continue;
541 }
542 }
543 if (p->next == NULL) {
544 output = NULL;
545 }
546
547 p = p->next;
548 } while (p != NULL);
549
550 if (draining == true && output == NULL) {
551 output = outputleastpoc;
552 }
553
554 return output;
555 }
556 #endif
557
bool VideoDecoderBase::checkBufferAvail(void) {
559 if (!mInitialized) {
560 if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) {
561 return true;
562 }
563 for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
564 if (mSignalBufferPre[i] != NULL) {
565 return true;
566 }
567 }
568 return false;
569 }
570 // check whether there is buffer available for decoding
571 // TODO: check frame being referenced for frame skipping
572 VideoSurfaceBuffer *buffer = NULL;
573 for (int32_t i = 0; i < mNumSurfaces; i++) {
574 buffer = mSurfaceBuffers + i;
575
576 if (buffer->asReferernce == false &&
577 buffer->renderBuffer.renderDone == true) {
578 querySurfaceRenderStatus(buffer);
579 if (buffer->renderBuffer.driverRenderDone == true)
580 return true;
581 }
582 }
583 return false;
584 }
585
Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) {
587 if (mVAStarted == false) {
588 return DECODE_FAIL;
589 }
590
591 if (mAcquiredBuffer != NULL) {
592 ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
593 return DECODE_FAIL;
594 }
595
596 int nextAcquire = mSurfaceAcquirePos;
597 VideoSurfaceBuffer *acquiredBuffer = NULL;
598 bool acquired = false;
599
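// Round-robin scan starting at mSurfaceAcquirePos: a buffer is eligible only
// if it is not held as a reference, rendering is done on both the client and
// driver side, and no other surface buffer still points at the same VA
// surface (which can happen after a skipped frame duplicated a reference).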
600 while (acquired == false) {
601 acquiredBuffer = mSurfaceBuffers + nextAcquire;
602
603 querySurfaceRenderStatus(acquiredBuffer);
604
605 if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true && acquiredBuffer->renderBuffer.driverRenderDone == true) {
// this is a potential buffer for acquisition; check whether it is referenced by another surface (frame skipping case)
607 VideoSurfaceBuffer *temp;
608 acquired = true;
609 for (int i = 0; i < mNumSurfaces; i++) {
610 if (i == nextAcquire) {
611 continue;
612 }
613 temp = mSurfaceBuffers + i;
// use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as it's the actual surface to use.
615 if (temp->renderBuffer.surface == mSurfaces[nextAcquire] &&
616 temp->renderBuffer.renderDone == false) {
617 ITRACE("Surface is referenced by other surface buffer.");
618 acquired = false;
619 break;
620 }
621 }
622 }
623 if (acquired) {
624 break;
625 }
626 nextAcquire++;
627 if (nextAcquire == mNumSurfaces) {
628 nextAcquire = 0;
629 }
630 if (nextAcquire == mSurfaceAcquirePos) {
631 return DECODE_NO_SURFACE;
632 }
633 }
634
635 if (acquired == false) {
636 return DECODE_NO_SURFACE;
637 }
638
639 mAcquiredBuffer = acquiredBuffer;
640 mSurfaceAcquirePos = nextAcquire;
641
// set the surface again as it may have been reset by a skipped frame.
643 // skipped frame is a "non-coded frame" and decoder needs to duplicate the previous reference frame as the output.
644 mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos];
645 if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) {
646 mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos];
647 }
648 mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS;
649 mAcquiredBuffer->renderBuffer.display = mVADisplay;
650 mAcquiredBuffer->renderBuffer.flag = 0;
651 mAcquiredBuffer->renderBuffer.renderDone = false;
652 mAcquiredBuffer->asReferernce = false;
653 mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 0;
654 mAcquiredBuffer->renderBuffer.errBuf.timeStamp = INVALID_PTS;
655
656 return DECODE_SUCCESS;
657 }
658
Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) {
660 Decode_Status status;
661 if (mAcquiredBuffer == NULL) {
662 ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
663 return DECODE_FAIL;
664 }
665
666 if (mRawOutput) {
667 status = getRawDataFromSurface();
668 CHECK_STATUS();
669 }
670
// frame is successfully decoded to the current surface; it is ready for output
672 if (mShowFrame) {
673 mAcquiredBuffer->renderBuffer.renderDone = false;
674 } else {
675 mAcquiredBuffer->renderBuffer.renderDone = true;
676 }
677
// decoder must set the "asReferernce" and "referenceFrame" flags properly
679
680 // update reference frames
681 if (mAcquiredBuffer->referenceFrame) {
682 if (mManageReference) {
683 // managing reference for MPEG4/H.263/WMV.
684 // AVC should manage reference frame in a different way
685 if (mForwardReference != NULL) {
// this forward reference is no longer needed
687 mForwardReference->asReferernce = false;
688 }
// Forward reference for either P or B frame prediction
690 mForwardReference = mLastReference;
691 mAcquiredBuffer->asReferernce = true;
692 }
693
694 // the last reference frame.
695 mLastReference = mAcquiredBuffer;
696 }
697 // add to the output list
698 if (mShowFrame) {
699 if (mOutputHead == NULL) {
700 mOutputHead = mAcquiredBuffer;
701 } else {
702 mOutputTail->next = mAcquiredBuffer;
703 }
704 mOutputTail = mAcquiredBuffer;
705 mOutputTail->next = NULL;
706 }
707
708 //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6);
709
710 mAcquiredBuffer = NULL;
711 mSurfaceAcquirePos = (mSurfaceAcquirePos + 1 ) % mNumSurfaces;
712 return DECODE_SUCCESS;
713 }
714
Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) {
716 if (mAcquiredBuffer == NULL) {
// this is a harmless error
718 return DECODE_SUCCESS;
719 }
720
721 // frame is not decoded to the acquired buffer, current surface is invalid, and can't be output.
722 mAcquiredBuffer->asReferernce = false;
723 mAcquiredBuffer->renderBuffer.renderDone = true;
724 mAcquiredBuffer = NULL;
725 return DECODE_SUCCESS;
726 }
727
void VideoDecoderBase::flushSurfaceBuffers(void) {
729 endDecodingFrame(true);
730 VideoSurfaceBuffer *p = NULL;
731 while (mOutputHead) {
732 mOutputHead->renderBuffer.renderDone = true;
733 p = mOutputHead;
734 mOutputHead = mOutputHead->next;
735 p->next = NULL;
736 }
737 mOutputHead = NULL;
738 mOutputTail = NULL;
739 }
740
Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
742 Decode_Status status = DECODE_SUCCESS;
743 VAStatus vaStatus;
744
745 if (mDecodingFrame == false) {
746 if (mAcquiredBuffer != NULL) {
747 //ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
748 releaseSurfaceBuffer();
749 status = DECODE_FAIL;
750 }
751 return status;
752 }
753 // return through exit label to reset mDecodingFrame
754 if (mAcquiredBuffer == NULL) {
755 ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
756 status = DECODE_FAIL;
757 goto exit;
758 }
759
760 vaStatus = vaEndPicture(mVADisplay, mVAContext);
761 if (vaStatus != VA_STATUS_SUCCESS) {
762 releaseSurfaceBuffer();
763 ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
764 status = DECODE_DRIVER_FAIL;
765 goto exit;
766 }
767
768 if (dropFrame) {
769 // we are asked to drop this decoded picture
770 VTRACE("Frame dropped in endDecodingFrame");
771 vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
772 releaseSurfaceBuffer();
773 goto exit;
774 }
775 status = outputSurfaceBuffer();
776 // fall through
777 exit:
778 mDecodingFrame = false;
779 return status;
780 }
781
782
Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile, uint32_t numExtraSurface) {
784 VAStatus vaStatus = VA_STATUS_SUCCESS;
785 Decode_Status status;
786
787 if (mVAStarted) {
788 return DECODE_SUCCESS;
789 }
790
791 mRotationDegrees = 0;
792 if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){
793 #ifdef TARGET_HAS_ISV
794 if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum)
795 #else
796 if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)
797 #endif
798 return DECODE_FORMAT_CHANGE;
799
800 numSurface = mConfigBuffer.surfaceNumber;
// if the format has changed in USE_NATIVE_GRAPHIC_BUFFER mode,
// we cannot set up VA here when the graphic buffer resolution is smaller than the resolution the decoder really needs
803 if (mSizeChanged) {
804 if (mStoreMetaData || (!mStoreMetaData && (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height))) {
805 mSizeChanged = false;
806 return DECODE_FORMAT_CHANGE;
807 }
808 }
809 }
810
811 // TODO: validate profile
812 if (numSurface == 0) {
813 return DECODE_FAIL;
814 }
815
816 if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
817 if (numSurface < mConfigBuffer.surfaceNumber) {
818 WTRACE("surface to allocated %d is less than minimum number required %d",
819 numSurface, mConfigBuffer.surfaceNumber);
820 numSurface = mConfigBuffer.surfaceNumber;
821 }
822 }
823
824 if (mVADisplay != NULL) {
825 ETRACE("VA is partially started.");
826 return DECODE_FAIL;
827 }
828
829 // Display is defined as "unsigned int"
830 #ifndef USE_HYBRID_DRIVER
831 mDisplay = new Display;
832 *mDisplay = ANDROID_DISPLAY_HANDLE;
833 #else
834 if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) {
835 ITRACE("Using GEN driver");
836 mDisplay = "libva_driver_name=i965";
837 mUseGEN = true;
838 } else {
839 ITRACE("Using PVR driver");
840 mDisplay = "libva_driver_name=pvr";
841 mUseGEN = false;
842 }
843 #endif
844 mVADisplay = vaGetDisplay(mDisplay);
845 if (mVADisplay == NULL) {
846 ETRACE("vaGetDisplay failed.");
847 return DECODE_DRIVER_FAIL;
848 }
849
850 int majorVersion, minorVersion;
851 vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
852 CHECK_VA_STATUS("vaInitialize");
853
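// VAProfileSoftwareDecoding is a special profile value: no VA config or
// context is created for it; surfaces are still allocated and later mapped
// (see mapSurface()) so the software decoder can write into them directly.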
854 if ((int32_t)profile != VAProfileSoftwareDecoding) {
855
856 status = checkHardwareCapability();
857 CHECK_STATUS("checkHardwareCapability");
858
859 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
860 status = getCodecSpecificConfigs(profile, &mVAConfig);
861 CHECK_STATUS("getCodecSpecificAttributes");
862 #else
863 VAConfigAttrib attrib;
864 //We are requesting RT attributes
865 attrib.type = VAConfigAttribRTFormat;
866 attrib.value = VA_RT_FORMAT_YUV420;
867
868 vaStatus = vaCreateConfig(
869 mVADisplay,
870 profile,
871 VAEntrypointVLD,
872 &attrib,
873 1,
874 &mVAConfig);
875 CHECK_VA_STATUS("vaCreateConfig");
876 #endif
877 }
878
879 mNumSurfaces = numSurface;
880 mNumExtraSurfaces = numExtraSurface;
881 mSurfaces = new VASurfaceID [mNumSurfaces + mNumExtraSurfaces];
882 mExtraSurfaces = mSurfaces + mNumSurfaces;
883 for (int i = 0; i < mNumSurfaces + mNumExtraSurfaces; ++i) {
884 mSurfaces[i] = VA_INVALID_SURFACE;
885 }
886 if (mSurfaces == NULL) {
887 return DECODE_MEMORY_FAIL;
888 }
889
890 setRenderRect();
891 setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
892
893 int32_t format = VA_RT_FORMAT_YUV420;
894 if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
895 #ifndef USE_AVC_SHORT_FORMAT
896 format |= VA_RT_FORMAT_PROTECTED;
897 WTRACE("Surface is protected.");
898 #endif
899 }
900 if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
901 if (!mStoreMetaData) {
902 VASurfaceAttrib attribs[2];
903 mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
904 if (mVASurfaceAttrib == NULL) {
905 return DECODE_MEMORY_FAIL;
906 }
907
908 mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
909 if (mVASurfaceAttrib->buffers == NULL) {
910 return DECODE_MEMORY_FAIL;
911 }
912 mVASurfaceAttrib->num_buffers = mNumSurfaces;
913 mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
914 mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
915 mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
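// NV12 layout in each gralloc buffer: a full-resolution Y plane of
// HStride * VStride bytes followed by an interleaved UV plane at half
// vertical resolution, hence data_size = HStride * VStride * 1.5 and
// offsets[1] = HStride * VStride below.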
916 mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferHStride * mConfigBuffer.graphicBufferVStride * 1.5;
917 mVASurfaceAttrib->num_planes = 2;
918 mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferHStride;
919 mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferHStride;
920 mVASurfaceAttrib->pitches[2] = 0;
921 mVASurfaceAttrib->pitches[3] = 0;
922 mVASurfaceAttrib->offsets[0] = 0;
923 mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferHStride * mConfigBuffer.graphicBufferVStride;
924 mVASurfaceAttrib->offsets[2] = 0;
925 mVASurfaceAttrib->offsets[3] = 0;
926 mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
927 mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
928 if (mConfigBuffer.flag & USE_TILING_MEMORY)
929 mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
930
931 for (int i = 0; i < mNumSurfaces; i++) {
932 mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
933 }
934
935 attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
936 attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
937 attribs[0].value.type = VAGenericValueTypeInteger;
938 attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
939
940 attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
941 attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
942 attribs[1].value.type = VAGenericValueTypePointer;
943 attribs[1].value.value.p = (void *)mVASurfaceAttrib;
944
945 vaStatus = vaCreateSurfaces(
946 mVADisplay,
947 format,
948 mVideoFormatInfo.surfaceWidth,
949 mVideoFormatInfo.surfaceHeight,
950 mSurfaces,
951 mNumSurfaces,
952 attribs,
953 2);
954 }
955 } else {
956 vaStatus = vaCreateSurfaces(
957 mVADisplay,
958 format,
959 mVideoFormatInfo.width,
960 mVideoFormatInfo.height,
961 mSurfaces,
962 mNumSurfaces,
963 NULL,
964 0);
965 mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
966 mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
967 }
968 CHECK_VA_STATUS("vaCreateSurfaces");
969
970 if (mNumExtraSurfaces != 0) {
971 vaStatus = vaCreateSurfaces(
972 mVADisplay,
973 format,
974 mVideoFormatInfo.surfaceWidth,
975 mVideoFormatInfo.surfaceHeight,
976 mExtraSurfaces,
977 mNumExtraSurfaces,
978 NULL,
979 0);
980 CHECK_VA_STATUS("vaCreateSurfaces");
981 }
982
983 mVideoFormatInfo.surfaceNumber = mNumSurfaces;
984 mVideoFormatInfo.ctxSurfaces = mSurfaces;
985
986 if ((int32_t)profile != VAProfileSoftwareDecoding) {
987 if (mStoreMetaData) {
988 if (mUseGEN) {
989 vaStatus = vaCreateContext(
990 mVADisplay,
991 mVAConfig,
992 mVideoFormatInfo.surfaceWidth,
993 mVideoFormatInfo.surfaceHeight,
994 0,
995 NULL,
996 0,
997 &mVAContext);
998 } else {
999 vaStatus = vaCreateContext(
1000 mVADisplay,
1001 mVAConfig,
1002 mVideoFormatInfo.surfaceWidth,
1003 mVideoFormatInfo.surfaceHeight,
1004 0,
1005 NULL,
1006 mNumSurfaces + mNumExtraSurfaces,
1007 &mVAContext);
1008 }
1009 } else {
1010 vaStatus = vaCreateContext(
1011 mVADisplay,
1012 mVAConfig,
1013 mVideoFormatInfo.surfaceWidth,
1014 mVideoFormatInfo.surfaceHeight,
1015 0,
1016 mSurfaces,
1017 mNumSurfaces + mNumExtraSurfaces,
1018 &mVAContext);
1019 }
1020 CHECK_VA_STATUS("vaCreateContext");
1021 }
1022
1023 mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
1024 if (mSurfaceBuffers == NULL) {
1025 return DECODE_MEMORY_FAIL;
1026 }
1027 initSurfaceBuffer(true);
1028
1029 if ((int32_t)profile == VAProfileSoftwareDecoding) {
1030 // derive user pointer from surface for direct access
1031 status = mapSurface();
1032 CHECK_STATUS("mapSurface")
1033 }
1034
1035 setRotationDegrees(mConfigBuffer.rotationDegrees);
1036
1037 mVAStarted = true;
1038
1039 pthread_mutex_lock(&mLock);
1040 if (mStoreMetaData) {
1041 for (uint32_t i = 0; i < mMetaDataBuffersNum; i++) {
1042 status = createSurfaceFromHandle(i);
1043 CHECK_STATUS("createSurfaceFromHandle");
1044 mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
1045 }
1046 }
1047 pthread_mutex_unlock(&mLock);
1048
1049 return DECODE_SUCCESS;
1050 }
1051
Decode_Status VideoDecoderBase::terminateVA(void) {
1053 mSignalBufferSize = 0;
1054 for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
1055 mSignalBufferPre[i] = NULL;
1056 }
1057
1058 if (mVAStarted == false) {
1059 // VA hasn't been started yet
1060 return DECODE_SUCCESS;
1061 }
1062
1063 if (mSurfaceBuffers) {
1064 for (int32_t i = 0; i < mNumSurfaces; i++) {
1065 if (mSurfaceBuffers[i].renderBuffer.rawData) {
1066 if (mSurfaceBuffers[i].renderBuffer.rawData->data) {
1067 delete [] mSurfaceBuffers[i].renderBuffer.rawData->data;
1068 }
1069 delete mSurfaceBuffers[i].renderBuffer.rawData;
1070 }
1071 if (mSurfaceBuffers[i].mappedData) {
1072 // don't delete data pointer as it is mapped from surface
1073 delete mSurfaceBuffers[i].mappedData;
1074 }
1075 }
1076 delete [] mSurfaceBuffers;
1077 mSurfaceBuffers = NULL;
1078 }
1079
1080 if (mVASurfaceAttrib) {
1081 if (mVASurfaceAttrib->buffers) free(mVASurfaceAttrib->buffers);
1082 delete mVASurfaceAttrib;
1083 mVASurfaceAttrib = NULL;
1084 }
1085
1086
1087 if (mSurfaceUserPtr) {
1088 delete [] mSurfaceUserPtr;
1089 mSurfaceUserPtr = NULL;
1090 }
1091
1092 if (mSurfaces) {
1093 vaDestroySurfaces(mVADisplay, mSurfaces, mStoreMetaData ? mMetaDataBuffersNum : (mNumSurfaces + mNumExtraSurfaces));
1094 delete [] mSurfaces;
1095 mSurfaces = NULL;
1096 }
1097
1098 if (mVAContext != VA_INVALID_ID) {
1099 vaDestroyContext(mVADisplay, mVAContext);
1100 mVAContext = VA_INVALID_ID;
1101 }
1102
1103 if (mVAConfig != VA_INVALID_ID) {
1104 vaDestroyConfig(mVADisplay, mVAConfig);
1105 mVAConfig = VA_INVALID_ID;
1106 }
1107
1108 if (mVADisplay) {
1109 vaTerminate(mVADisplay);
1110 mVADisplay = NULL;
1111 }
1112
1113 if (mDisplay) {
1114 #ifndef USE_HYBRID_DRIVER
1115 delete mDisplay;
1116 #endif
1117 mDisplay = NULL;
1118 }
1119
1120 mVAStarted = false;
1121 mInitialized = false;
1122 mErrReportEnabled = false;
1123 if (mStoreMetaData) {
1124 mMetaDataBuffersNum = 0;
1125 mSurfaceAcquirePos = 0;
1126 }
1127 return DECODE_SUCCESS;
1128 }
1129
Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) {
1131 // DON'T check if mVAStarted == true
1132 if (mParserHandle == NULL) {
1133 return DECODE_NO_PARSER;
1134 }
1135
1136 uint32_t vbpStatus;
1137 if (buffer == NULL || size <= 0) {
1138 return DECODE_INVALID_DATA;
1139 }
1140
1141 uint8_t configFlag = config ? 1 : 0;
1142 vbpStatus = mParserParse(mParserHandle, buffer, size, configFlag);
1143 CHECK_VBP_STATUS("vbp_parse");
1144
1145 vbpStatus = mParserQuery(mParserHandle, vbpData);
1146 CHECK_VBP_STATUS("vbp_query");
1147
1148 return DECODE_SUCCESS;
1149 }
1150
Decode_Status VideoDecoderBase::mapSurface(void) {
1152 VAStatus vaStatus = VA_STATUS_SUCCESS;
1153 VAImage image;
1154 uint8_t *userPtr;
1155 mSurfaceUserPtr = new uint8_t* [mNumSurfaces];
1156 if (mSurfaceUserPtr == NULL) {
1157 return DECODE_MEMORY_FAIL;
1158 }
1159
1160 for (int32_t i = 0; i< mNumSurfaces; i++) {
1161 vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image);
1162 CHECK_VA_STATUS("vaDeriveImage");
1163 vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr);
1164 CHECK_VA_STATUS("vaMapBuffer");
1165 mSurfaceUserPtr[i] = userPtr;
1166 mSurfaceBuffers[i].mappedData = new VideoFrameRawData;
1167 if (mSurfaceBuffers[i].mappedData == NULL) {
1168 return DECODE_MEMORY_FAIL;
1169 }
1170 mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released
1171 mSurfaceBuffers[i].mappedData->data = NULL; // specified during acquireSurfaceBuffer
1172 mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc;
1173 mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width;
1174 mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height;
1175 mSurfaceBuffers[i].mappedData->size = image.data_size;
1176 for (int pi = 0; pi < 3; pi++) {
1177 mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi];
1178 mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi];
1179 }
1180 // debug information
1181 if (image.pitches[0] != image.pitches[1] ||
1182 image.width != mVideoFormatInfo.width ||
1183 image.height != mVideoFormatInfo.height ||
1184 image.offsets[0] != 0) {
1185 WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]);
1186 }
1187 // TODO: do we need to unmap buffer?
1188 //vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
1189 //CHECK_VA_STATUS("vaMapBuffer");
1190 vaStatus = vaDestroyImage(mVADisplay,image.image_id);
1191 CHECK_VA_STATUS("vaDestroyImage");
1192
1193 }
1194 return DECODE_SUCCESS;
1195 }
1196
Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderBuffer, uint8_t *pRawData, uint32_t *pSize, bool internal) {
1198 if (internal) {
1199 if (mAcquiredBuffer == NULL) {
1200 return DECODE_FAIL;
1201 }
1202 renderBuffer = &(mAcquiredBuffer->renderBuffer);
1203 }
1204
1205 VAStatus vaStatus;
1206 VAImage vaImage;
1207 vaStatus = vaSyncSurface(renderBuffer->display, renderBuffer->surface);
1208 CHECK_VA_STATUS("vaSyncSurface");
1209
1210 vaStatus = vaDeriveImage(renderBuffer->display, renderBuffer->surface, &vaImage);
1211 CHECK_VA_STATUS("vaDeriveImage");
1212
1213 void *pBuf = NULL;
1214 vaStatus = vaMapBuffer(renderBuffer->display, vaImage.buf, &pBuf);
1215 CHECK_VA_STATUS("vaMapBuffer");
1216
1217
1218 // size in NV12 format
1219 uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
1220 uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
1221 if (strcasecmp(mVideoFormatInfo.mimeType,"video/avc") == 0 ||
1222 strcasecmp(mVideoFormatInfo.mimeType,"video/h264") == 0) {
1223 cropHeight = mVideoFormatInfo.height;
1224 cropWidth = mVideoFormatInfo.width;
1225 }
1226 int32_t size = cropWidth * cropHeight * 3 / 2;
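// NV12 size: a full-resolution Y plane plus interleaved UV at half vertical
// resolution, i.e. cropWidth * cropHeight * 3 / 2 bytes. If the derived
// VAImage is exactly this size it is copied in one shot below; otherwise the
// Y and UV planes are copied row by row to strip the surface pitch.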
1227
1228 if (internal) {
1229 VideoFrameRawData *rawData = NULL;
1230 if (renderBuffer->rawData == NULL) {
1231 rawData = new VideoFrameRawData;
1232 if (rawData == NULL) {
1233 return DECODE_MEMORY_FAIL;
1234 }
1235 memset(rawData, 0, sizeof(VideoFrameRawData));
1236 renderBuffer->rawData = rawData;
1237 } else {
1238 rawData = renderBuffer->rawData;
1239 }
1240
1241 if (rawData->data != NULL && rawData->size != size) {
1242 delete [] rawData->data;
1243 rawData->data = NULL;
1244 rawData->size = 0;
1245 }
1246 if (rawData->data == NULL) {
1247 rawData->data = new uint8_t [size];
1248 if (rawData->data == NULL) {
1249 return DECODE_MEMORY_FAIL;
1250 }
1251 }
1252
1253 rawData->own = true; // allocated by this library
1254 rawData->width = cropWidth;
1255 rawData->height = cropHeight;
1256 rawData->pitch[0] = cropWidth;
1257 rawData->pitch[1] = cropWidth;
1258 rawData->pitch[2] = 0; // interleaved U/V, two planes
1259 rawData->offset[0] = 0;
1260 rawData->offset[1] = cropWidth * cropHeight;
1261 rawData->offset[2] = cropWidth * cropHeight * 3 / 2;
1262 rawData->size = size;
1263 rawData->fourcc = 'NV12';
1264
1265 pRawData = rawData->data;
1266 } else {
1267 *pSize = size;
1268 }
1269
1270 if (size == (int32_t)vaImage.data_size) {
1271 #ifdef __SSE4_1__
1272 stream_memcpy(pRawData, pBuf, size);
1273 #else
1274 memcpy(pRawData, pBuf, size);
1275 #endif
1276 } else {
1277 // copy Y data
1278 uint8_t *src = (uint8_t*)pBuf;
1279 uint8_t *dst = pRawData;
1280 uint32_t row = 0;
1281 for (row = 0; row < cropHeight; row++) {
1282 #ifdef __SSE4_1__
1283 stream_memcpy(dst, src, cropWidth);
1284 #else
1285 memcpy(dst, src, cropWidth);
1286 #endif
1287 dst += cropWidth;
1288 src += vaImage.pitches[0];
1289 }
1290 // copy interleaved V and U data
1291 src = (uint8_t*)pBuf + vaImage.offsets[1];
1292 for (row = 0; row < cropHeight / 2; row++) {
1293 #ifdef __SSE4_1__
1294 stream_memcpy(dst, src, cropWidth);
1295 #else
1296 memcpy(dst, src, cropWidth);
1297 #endif
1298 dst += cropWidth;
1299 src += vaImage.pitches[1];
1300 }
1301 }
1302
1303 vaStatus = vaUnmapBuffer(renderBuffer->display, vaImage.buf);
1304 CHECK_VA_STATUS("vaUnmapBuffer");
1305
1306 vaStatus = vaDestroyImage(renderBuffer->display, vaImage.image_id);
1307 CHECK_VA_STATUS("vaDestroyImage");
1308
1309 return DECODE_SUCCESS;
1310 }
1311
Decode_Status VideoDecoderBase::createSurfaceFromHandle(int index) {
1313 VAStatus vaStatus = VA_STATUS_SUCCESS;
1314 Decode_Status status;
1315
1316 int32_t format = VA_RT_FORMAT_YUV420;
1317 if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
1318 #ifndef USE_AVC_SHORT_FORMAT
1319 format |= VA_RT_FORMAT_PROTECTED;
1320 WTRACE("Surface is protected.");
1321 #endif
1322 }
1323 VASurfaceAttrib attribs[2];
1324 VASurfaceAttribExternalBuffers surfExtBuf;
1325 surfExtBuf.num_buffers = 1;
1326 surfExtBuf.pixel_format = VA_FOURCC_NV12;
1327 surfExtBuf.width = mVideoFormatInfo.surfaceWidth;
1328 surfExtBuf.height = mVideoFormatInfo.surfaceHeight;
1329 surfExtBuf.data_size = mConfigBuffer.graphicBufferHStride * mConfigBuffer.graphicBufferVStride * 1.5;
1330 surfExtBuf.num_planes = 2;
1331 surfExtBuf.pitches[0] = mConfigBuffer.graphicBufferHStride;
1332 surfExtBuf.pitches[1] = mConfigBuffer.graphicBufferHStride;
1333 surfExtBuf.pitches[2] = 0;
1334 surfExtBuf.pitches[3] = 0;
1335 surfExtBuf.offsets[0] = 0;
1336 surfExtBuf.offsets[1] = mConfigBuffer.graphicBufferHStride * mConfigBuffer.graphicBufferVStride;
1337 surfExtBuf.offsets[2] = 0;
1338 surfExtBuf.offsets[3] = 0;
1339 surfExtBuf.private_data = (void *)mConfigBuffer.nativeWindow;
1340 surfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
1341 if (mConfigBuffer.flag & USE_TILING_MEMORY) {
1342 surfExtBuf.flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
1343 }
1344
1345 surfExtBuf.buffers = (long unsigned int*)&(mConfigBuffer.graphicBufferHandler[index]);
1346
1347 attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
1348 attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
1349 attribs[0].value.type = VAGenericValueTypeInteger;
1350 attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
1351
1352 attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
1353 attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
1354 attribs[1].value.type = VAGenericValueTypePointer;
1355 attribs[1].value.value.p = (void *)&surfExtBuf;
1356
1357 vaStatus = vaCreateSurfaces(
1358 mVADisplay,
1359 format,
1360 mVideoFormatInfo.surfaceWidth,
1361 mVideoFormatInfo.surfaceHeight,
1362 &(mSurfaces[index]),
1363 1,
1364 attribs,
1365 2);
1366 CHECK_VA_STATUS("vaCreateSurfaces");
1367
1368 return DECODE_SUCCESS;
1369 }
1370
void VideoDecoderBase::initSurfaceBuffer(bool reset) {
1372 bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
1373 if (useGraphicBuffer && reset) {
1374 pthread_mutex_lock(&mLock);
1375 }
1376 for (int32_t i = 0; i < mNumSurfaces; i++) {
1377 mSurfaceBuffers[i].renderBuffer.display = mVADisplay;
1378 mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer
1379 mSurfaceBuffers[i].renderBuffer.flag = 0;
1380 mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE;
1381 mSurfaceBuffers[i].renderBuffer.timeStamp = 0;
1382 mSurfaceBuffers[i].referenceFrame = false;
1383 mSurfaceBuffers[i].asReferernce= false;
1384 mSurfaceBuffers[i].pictureOrder = 0;
1385 mSurfaceBuffers[i].next = NULL;
1386 if (reset == true) {
1387 mSurfaceBuffers[i].renderBuffer.rawData = NULL;
1388 mSurfaceBuffers[i].mappedData = NULL;
1389 }
1390 if (useGraphicBuffer) {
1391 if (reset) {
1392 mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i];
1393 mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false
1394 for (uint32_t j = 0; j < mSignalBufferSize; j++) {
1395 if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle) {
1396 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1397 VTRACE("initSurfaceBuffer set renderDone = true index = %d", i);
1398 mSignalBufferPre[j] = NULL;
1399 break;
1400 }
1401 }
1402 } else {
1403 mSurfaceBuffers[i].renderBuffer.renderDone = false;
1404 }
1405 } else {
1406 mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL;
1407 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1408 }
1409 mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
1410 }
1411
1412 if (useGraphicBuffer && reset) {
1413 mInitialized = true;
1414 mSignalBufferSize = 0;
1415 pthread_mutex_unlock(&mLock);
1416 }
1417 }
1418
Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler, bool isNew) {
1420 Decode_Status status;
1421 if (graphichandler == NULL) {
1422 return DECODE_SUCCESS;
1423 }
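// Two phases: before initSurfaceBuffer() has run (mInitialized == false) the
// handle is only recorded in mSignalBufferPre[] and consumed during
// initialization; afterwards the matching surface buffer's renderDone flag is
// set directly. In store-metadata mode a new handle may also trigger surface
// creation via createSurfaceFromHandle().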
1424 pthread_mutex_lock(&mLock);
1425 bool graphicBufferMode = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
1426 if (mStoreMetaData) {
1427 if (!graphicBufferMode) {
1428 pthread_mutex_unlock(&mLock);
1429 return DECODE_SUCCESS;
1430 }
1431
1432 if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) {
1433 mConfigBuffer.graphicBufferHandler[mMetaDataBuffersNum] = graphichandler;
1434 if (mInitialized) {
1435 mSurfaceBuffers[mMetaDataBuffersNum].renderBuffer.graphicBufferHandle = graphichandler;
1436 mSurfaceBuffers[mMetaDataBuffersNum].renderBuffer.graphicBufferIndex = mMetaDataBuffersNum;
1437 }
1438 }
1439 }
1440 int i = 0;
1441 if (!mInitialized) {
1442 if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) {
1443 pthread_mutex_unlock(&mLock);
1444 return DECODE_INVALID_DATA;
1445 }
1446 mSignalBufferPre[mSignalBufferSize++] = graphichandler;
1447 VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize);
1448 } else {
1449 if (!graphicBufferMode) {
1450 pthread_mutex_unlock(&mLock);
1451 return DECODE_SUCCESS;
1452 }
1453 if (mStoreMetaData) {
1454 if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) {
1455 if (mVAStarted) {
1456 status = createSurfaceFromHandle(mMetaDataBuffersNum);
1457 CHECK_STATUS("createSurfaceFromHandle")
1458 }
1459 }
1460 }
1461 for (i = 0; i < mNumSurfaces; i++) {
1462 if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) {
1463 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1464 VTRACE("SignalRenderDoneFlag mInitialized = true index = %d", i);
1465 break;
1466 }
1467 }
1468 }
1469
1470 if (mStoreMetaData) {
1471 if ((mMetaDataBuffersNum < mConfigBuffer.surfaceNumber) && isNew) {
1472 mMetaDataBuffersNum++;
1473 }
1474 }
1475
1476 pthread_mutex_unlock(&mLock);
1477
1478 return DECODE_SUCCESS;
1479
1480 }
1481
void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) {
1483 VASurfaceStatus surfStat = VASurfaceReady;
1484 VAStatus vaStat = VA_STATUS_SUCCESS;
1485
1486 if (!surface) {
1487 LOGW("SurfaceBuffer not ready yet");
1488 return;
1489 }
1490 surface->renderBuffer.driverRenderDone = true;
1491
1492 #ifndef USE_GEN_HW
1493 if (surface->renderBuffer.surface != VA_INVALID_SURFACE &&
1494 (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
1495
1496 vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat);
1497
1498 if ((vaStat == VA_STATUS_SUCCESS) && (surfStat != VASurfaceReady))
1499 surface->renderBuffer.driverRenderDone = false;
1500
1501 }
1502 #endif
1503
1504 }
1505
// This function should be called before start() to load different types of parsers
1507 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) {
1509 if ((int32_t)type != VBP_INVALID) {
1510 ITRACE("Parser Type = %d", (int32_t)type);
1511 mParserType = type;
1512 return DECODE_SUCCESS;
1513 } else {
1514 ETRACE("Invalid parser type = %d", (int32_t)type);
1515 return DECODE_NO_PARSER;
1516 }
1517 }
1518
Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void** vbpData) {
1520 if (mParserHandle == NULL) {
1521 return DECODE_NO_PARSER;
1522 }
1523
1524 uint32_t vbpStatus;
1525 if (buffer == NULL || size <= 0) {
1526 return DECODE_INVALID_DATA;
1527 }
1528
1529 vbpStatus = mParserUpdate(mParserHandle, buffer, size, vbpData);
1530 CHECK_VBP_STATUS("vbp_update");
1531
1532 return DECODE_SUCCESS;
1533 }
1534
Decode_Status VideoDecoderBase::queryBuffer(void** vbpData) {
1536 if (mParserHandle == NULL) {
1537 return DECODE_NO_PARSER;
1538 }
1539
1540 uint32_t vbpStatus;
1541 vbpStatus = mParserQuery(mParserHandle, vbpData);
1542 CHECK_VBP_STATUS("vbp_query");
1543
1544 return DECODE_SUCCESS;
1545 }
1546
Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) {
1548 VAStatus vaStatus;
1549 VAConfigAttrib attrib;
1550 attrib.type = VAConfigAttribRTFormat;
1551 attrib.value = VA_RT_FORMAT_YUV420;
1552
1553 if (config == NULL) {
1554 ETRACE("Invalid parameter!");
1555 return DECODE_FAIL;
1556 }
1557
1558 vaStatus = vaCreateConfig(
1559 mVADisplay,
1560 profile,
1561 VAEntrypointVLD,
1562 &attrib,
1563 1,
1564 config);
1565
1566 CHECK_VA_STATUS("vaCreateConfig");
1567
1568 return DECODE_SUCCESS;
1569 }
1570 #endif
Decode_Status VideoDecoderBase::checkHardwareCapability() {
1572 return DECODE_SUCCESS;
1573 }
1574
void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface) {
1576 if (mErrReportEnabled && outErrBuf && currentSurface) {
1577 memcpy(outErrBuf, &(currentSurface->errBuf), sizeof(VideoErrorBuffer));
1578
1579 currentSurface->errBuf.errorNumber = 0;
1580 currentSurface->errBuf.timeStamp = INVALID_PTS;
1581 }
1582 if (outErrBuf)
1583 VTRACE("%s: error number is %d", __FUNCTION__, outErrBuf->errorNumber);
1584 }
1585
void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) {
1587 VAStatus ret;
1588
1589 if (mErrReportEnabled) {
1590 currentSurface->errBuf.timeStamp = currentSurface->timeStamp;
1591 // TODO: is 10 a suitable number?
1592 VASurfaceDecodeMBErrors *err_drv_output = NULL;
1593 ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output);
1594 if (ret || !err_drv_output) {
1595 WTRACE("vaQuerySurfaceError failed.");
1596 return;
1597 }
1598
1599 int offset = 0x1 & currentSurface->errBuf.errorNumber;// offset is either 0 or 1
1600 for (int i = 0; i < MAX_ERR_NUM - offset; i++) {
1601 if (err_drv_output[i].status != -1) {
1602 currentSurface->errBuf.errorNumber++;
1603 currentSurface->errBuf.errorArray[i + offset].type = DecodeMBError;
1604 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb;
1605 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb;
1606 currentSurface->errBuf.errorArray[i + offset].num_mbs = err_drv_output[i].end_mb - err_drv_output[i].start_mb + 1;
1607 ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d",
1608 currentSurface->errBuf.errorNumber - 1,
1609 currentSurface->errBuf.errorArray[i + offset].type,
1610 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb,
1611 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb);
1612 } else break;
1613 }
1614 ITRACE("%s: error number of current surface is %d, timestamp @%llu",
1615 __FUNCTION__, currentSurface->errBuf.errorNumber, currentSurface->timeStamp);
1616 }
1617 }
1618
void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) {
1620 if (mRotationDegrees == rotationDegrees) {
1621 return;
1622 }
1623
1624 ITRACE("set new rotation degree: %d", rotationDegrees);
1625 VADisplayAttribute rotate;
1626 rotate.type = VADisplayAttribRotation;
1627 rotate.value = VA_ROTATION_NONE;
1628 if (rotationDegrees == 0)
1629 rotate.value = VA_ROTATION_NONE;
1630 else if (rotationDegrees == 90)
1631 rotate.value = VA_ROTATION_90;
1632 else if (rotationDegrees == 180)
1633 rotate.value = VA_ROTATION_180;
1634 else if (rotationDegrees == 270)
1635 rotate.value = VA_ROTATION_270;
1636
1637 VAStatus ret = vaSetDisplayAttributes(mVADisplay, &rotate, 1);
1638 if (ret) {
1639 ETRACE("Failed to set rotation degree.");
1640 }
1641 mRotationDegrees = rotationDegrees;
1642 }
1643
void VideoDecoderBase::setRenderRect() {
1645
1646 if (!mVADisplay)
1647 return;
1648
1649 VAStatus ret;
1650 VARectangle rect;
1651 rect.x = mVideoFormatInfo.cropLeft;
1652 rect.y = mVideoFormatInfo.cropTop;
1653 rect.width = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
1654 rect.height = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
1655 if (strcasecmp(mVideoFormatInfo.mimeType,"video/avc") == 0 ||
1656 strcasecmp(mVideoFormatInfo.mimeType,"video/h264") == 0) {
1657 rect.height = mVideoFormatInfo.height;
1658 rect.width = mVideoFormatInfo.width;
1659 }
1660
1661 VADisplayAttribute render_rect;
1662 render_rect.type = VADisplayAttribRenderRect;
1663 render_rect.attrib_ptr = ▭
1664
1665 ret = vaSetDisplayAttributes(mVADisplay, &render_rect, 1);
1666 if (ret) {
1667 ETRACE("Failed to set rotation degree.");
1668 }
1669 }
1670
void VideoDecoderBase::setColorSpaceInfo(int32_t colorMatrix, int32_t videoRange) {
1672 ITRACE("set colorMatrix: 0x%x ", colorMatrix);
1673 VADisplayAttribute cm;
1674 cm.type = VADisplayAttribCSCMatrix;
1675 if (colorMatrix == VA_SRC_BT601) {
1676 cm.attrib_ptr = &s601;
1677 } else if (colorMatrix == VA_SRC_BT709) {
1678 cm.attrib_ptr = &s709;
1679 } else {
1680 // if we can't get the color matrix or it's not BT601 or BT709
1681 // we decide the color matrix according to clip resolution
1682 if (mVideoFormatInfo.width < 1280 && mVideoFormatInfo.height < 720)
1683 cm.attrib_ptr = &s601;
1684 else
1685 cm.attrib_ptr = &s709;
1686 }
1687
1688 VAStatus ret = vaSetDisplayAttributes(mVADisplay, &cm, 1);
1689
1690 if (ret) {
1691 ETRACE("Failed to set colorMatrix.");
1692 }
1693
1694 // 1: full range, 0: reduced range
1695 ITRACE("set videoRange: %d ", videoRange);
1696 VADisplayAttribute vr;
1697 vr.type = VADisplayAttribColorRange;
1698 vr.value = (videoRange == 1) ? VA_SOURCE_RANGE_FULL : VA_SOURCE_RANGE_REDUCED;
1699
1700 ret = vaSetDisplayAttributes(mVADisplay, &vr, 1);
1701
1702 if (ret) {
1703 ETRACE("Failed to set videoRange.");
1704 }
1705 }
1706