/*
 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VideoDecoderAVC.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <cutils/properties.h>

// Macros for actual buffer needed calculation
#define WIDI_CONSUMED 6
#define HDMI_CONSUMED 2
#define NW_CONSUMED 2
#define POC_DEFAULT 0x7FFFFFFF
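
// Illustrative sizing only (example numbers, not taken from any particular
// stream): with an output window of 8 frames, 4 reference frames, HDMI buffer
// cycling and WiDi off, updateFormatInfo() below would request roughly
// 8 + NW_CONSUMED(2) + 4 + HDMI_CONSUMED(2) + 1 = 17 surfaces; enabling WiDi
// adds WIDI_CONSUMED(6) more.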

VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_H264),
      mToggleDPB(0),
      mErrorConcealment(false) {

    invalidateDPB(0);
    invalidateDPB(1);
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

VideoDecoderAVC::~VideoDecoderAVC() {
    stop();
}

Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    // We don't want the base class to manage references.
    VideoDecoderBase::ManageReference(false);
    // output by picture order count
    VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);

    mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
            ITRACE("Use the client-supplied profile and surface number to start VA.");
            return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
        }
        return DECODE_SUCCESS;
    }

    vbp_data_h264 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    status = startVA(data);
    return status;
}

void VideoDecoderAVC::stop(void) {
    // drop the last frame and ignore return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mErrorConcealment = false;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

void VideoDecoderAVC::flush(void) {
    // drop the frame and ignore return value
    VideoDecoderBase::flush();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_h264 *data = NULL;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (!mVAStarted) {
        if (data->has_sps && data->has_pps) {
            status = startVA(data);
            CHECK_STATUS("startVA");
        } else {
            WTRACE("Can't start VA: SPS or PPS is not yet available.");
            return DECODE_SUCCESS;
        }
    }

    VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);

    status = decodeFrame(buffer, data);
    if (status == DECODE_MULTIPLE_FRAME) {
        buffer->ext = &mExtensionBuffer;
        mExtensionBuffer.extType = PACKED_FRAME_TYPE;
        mExtensionBuffer.extSize = sizeof(mPackedFrame);
        mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
    }
    return status;
}

Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    Decode_Status status;
    if (data->has_sps == 0 || data->has_pps == 0) {
        return DECODE_NO_CONFIG;
    }

    mVideoFormatInfo.flags = 0;
    uint32_t fieldFlags = 0;
    for (unsigned int i = 0; i < data->num_pictures; i++) {
        VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
        fieldFlags |= pic.flags;
        // Don't remove the following code; it can be enabled for debugging the DPB.
#if 0
        VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d",
                i,
                buffer->timeStamp/1E6,
                pic.TopFieldOrderCnt,
                pic.BottomFieldOrderCnt,
                pic.flags,
                (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
                (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
#endif
    }
    int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
    int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
    if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
        mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
    }

    if (data->new_sps || data->new_pps) {
        status = handleNewSequence(data);
        CHECK_STATUS("handleNewSequence");
    }

    if (isWiDiStatusChanged()) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    }

    // the first pic_data always exists; check whether any slice was parsed
    if (data->pic_data[0].num_slices == 0) {
        ITRACE("No slice available for decoding.");
        status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
        mSizeChanged = false;
        return status;
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;
    //if (lastPTS != mCurrentPTS) {
    if (isNewFrame(data, lastPTS == mCurrentPTS)) {
        if (mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            if (status != DECODE_SUCCESS) {
                Decode_Status st = status;
                // finish decoding the last frame if an error is encountered
                // while decoding the new frame
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
                return st;
            }
        }

        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        if (!mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            CHECK_STATUS("beginDecodingFrame");
        }
    } else {
        status = continueDecodingFrame(data);
        CHECK_STATUS("continueDecodingFrame");
    }

    // HAS_COMPLETE_FRAME is not reliable as it may indicate the end of a field
#if 0
    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding the current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }
#endif
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");
    VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        mAcquiredBuffer->referenceFrame = true;
    } else {
        mAcquiredBuffer->referenceFrame = false;
    }
    // set asReference in updateDPB

    if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    // TODO: Set the discontinuity flag
    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    mAcquiredBuffer->pictureOrder = getPOC(picture);

    if (mSizeChanged) {
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
        mSizeChanged = false;
    }

    status = continueDecodingFrame(data);
    // the surface buffer is released if decode fails
    return status;
}


Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;
    vbp_picture_data_h264 *picData = data->pic_data;

    // TODO: remove this debugging code
    if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }
    for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
        // sanity check
        if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
            return DECODE_PARSER_FAIL;
        }

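        // Two frames may arrive packed in one input buffer: a picture after the
        // first one that carries no field flags starts a new frame rather than
        // the opposite field of the current one. Record where its slice data
        // begins and return DECODE_MULTIPLE_FRAME so that decode() can expose
        // the offset/timestamp through mExtensionBuffer and the caller can
        // resubmit the remainder.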
        if (picIndex > 0 &&
            (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
            // it is a packed frame buffer
            vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
            vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
            mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
            mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
            ITRACE("slice data offset = %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
            return DECODE_MULTIPLE_FRAME;
        }

        for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
            status = decodeSlice(data, picIndex, sliceIndex);
            if (status != DECODE_SUCCESS) {
                endDecodingFrame(true);
                // TODO: this is new code
                // remove the current frame from the DPB as it can't be decoded.
                removeReferenceFromDPB(picData->pic_parms);
                return status;
            }
        }
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates the start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the lost first slice
        }
        if (mDecodingFrame) {
            // interlaced content: complete decoding of the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");

            // for interlaced content, the top field POC may be valid only after the second field is parsed
            int32_t poc = getPOC(&(picParam->CurrPic));
            if (poc < mAcquiredBuffer->pictureOrder) {
                mAcquiredBuffer->pictureOrder = poc;
            }
        }

        // Check that there is no reference frame loss before decoding a frame

        // Update the reference frames and surface IDs for the DPB and the current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

#ifndef USE_AVC_SHORT_FORMAT
        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");
#endif
        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

#ifndef USE_AVC_SHORT_FORMAT

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#else
    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264Base),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#endif
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        sliceData->slice_size, // size
        1, // num_elements
        sliceData->buffer_addr + sliceData->slice_offset,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
    int32_t numList = 1;
    // TODO: set numList to 0 if it is an I slice
    if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
        // B slice
        numList = 2;
    }
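    // Per the H.264 spec, slice_type 0/5 is P, 1/6 is B and 2/7 is I (values
    // >= 5 assert that all slices of the picture share the same type). P slices
    // read only RefPicList0, B slices also read RefPicList1 (hence the second
    // pass below), and I slices carry no reference lists.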

    int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
    VAPictureH264 *ref = sliceParam->RefPicList0;

    for (int32_t i = 0; i < numList; i++) {
        if (activeMinus1 >= REF_LIST_SIZE) {
            ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
            return DECODE_PARSER_FAIL;
        }
        for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
            if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
                ref->picture_id = findSurface(ref);
                if (ref->picture_id == VA_INVALID_SURFACE) {
                    // DecodeRefMissing is counted once even if multiple references are missing
                    mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
                    mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;

                    if (mLastReference) {
                        WTRACE("Reference frame %d is missing. Use the last reference.", getPOC(ref));
                        ref->picture_id = mLastReference->renderBuffer.surface;
                    } else {
                        ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
                        return DECODE_NO_REFERENCE;
                    }
                }
            }
        }
        activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
        ref = sliceParam->RefPicList1;
    }
    return DECODE_SUCCESS;
}

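// The decoder keeps two DPB arrays (mDPBs[0]/mDPBs[1]) selected by mToggleDPB.
// updateDPB() builds the list for the incoming frame into the inactive copy
// from picParam->ReferenceFrames while findSurface()/findRefSurfaceBuffer()
// still resolve POCs against the active copy, then invalidates the old copy
// and flips the toggle.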
Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
    clearAsReference(mToggleDPB);
    // pointer to the toggled (new) DPB
    DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
    VAPictureH264 *ref = picParam->ReferenceFrames;

    // update the current picture ID
    picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;

    // build the new DPB
    for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
        if (ref->flags & VA_PICTURE_H264_INVALID) {
            continue;
        }
#ifdef USE_AVC_SHORT_FORMAT
        ref->picture_id = findSurface(ref);
#endif
        dpb->poc = getPOC(ref);
        // look for the latest reference frame in the DPB with the specified POC, in case frames share the same POC
        dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
        if (dpb->surfaceBuffer == NULL) {
            ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
            // DecodeRefMissing is counted once even if multiple references are missing
            mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
            mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
            if (dpb->poc == getPOC(&(picParam->CurrPic))) {
                WTRACE("updateDPB: Using the current picture for missing reference.");
                dpb->surfaceBuffer = mAcquiredBuffer;
            } else if (mLastReference) {
                WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder);
                // TODO: this is new code for error resilience
                dpb->surfaceBuffer = mLastReference;
            } else {
                WTRACE("updateDPB: Unable to recover the missing reference frame.");
                // continue building the DPB without advancing the dpb pointer;
                // this reference may never actually be used, which can happen
                // after seeking to a non-IDR I frame.
                continue;
                //return DECODE_NO_REFERENCE;
            }
        }
        if (dpb->surfaceBuffer) {
            // this surface is used as a reference
            dpb->surfaceBuffer->asReferernce = true;
        }
        dpb++;
    }

    // add the current frame to the DPB if it is a reference frame
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        dpb->poc = getPOC(&(picParam->CurrPic));
        dpb->surfaceBuffer = mAcquiredBuffer;
        dpb->surfaceBuffer->asReferernce = true;
    }
    // invalidate the old DPB and switch to the new one
    invalidateDPB(mToggleDPB);
    mToggleDPB = !mToggleDPB;
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
    bool found = false;
    uint32_t flags = 0;
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = NULL;
    uint8_t activeMinus1 = 0;
    VAPictureH264 *refList = NULL;
    VAPictureH264 *dpb = picParam->ReferenceFrames;
    VAPictureH264 *refFrame = NULL;

    for (int i = 0; i < picParam->num_ref_frames; i++) {
        dpb->picture_id = findSurface(dpb);
        dpb++;
    }

    return DECODE_SUCCESS;

    // NOTE: the early return above currently short-circuits the rest of this function.
    // invalidate the DPB in the picture buffer
    memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
    picParam->num_ref_frames = 0;

    // update the DPB from the reference list in each slice.
    for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
        sliceParam = &(picData->slc_data[slice].slc_parms);

        for (int32_t list = 0; list < 2; list++) {
            refList = (list == 0) ? sliceParam->RefPicList0 :
                                    sliceParam->RefPicList1;
            activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
                                         sliceParam->num_ref_idx_l1_active_minus1;
            if (activeMinus1 >= REF_LIST_SIZE) {
                return DECODE_PARSER_FAIL;
            }
            for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
                if (refList->flags & VA_PICTURE_H264_INVALID) {
                    break;
                }
                found = false;
                refFrame = picParam->ReferenceFrames;
                for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
                    if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
                        // check for a complementary field
                        flags = refFrame->flags | refList->flags;
                        // if both TOP and BOTTOM are set, clear those flags
                        if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
                            refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
                        }
                        found = true; // already in the DPB; do not add this one
                        break;
                    }
                }
                if (found == false) {
                    // add a new reference to the DPB
                    dpb->picture_id = findSurface(refList);
                    if (dpb->picture_id == VA_INVALID_SURFACE) {
                        if (mLastReference != NULL) {
                            dpb->picture_id = mLastReference->renderBuffer.surface;
                        } else {
                            ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList));
                            return DECODE_NO_REFERENCE;
                        }
                    }
                    dpb->flags = refList->flags;
                    // if this is a bottom field in the DPB, its top field must also be in the DPB,
                    // so clear the bottom flag; otherwise the VED may be confused when addressing the top field
                    if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
                        dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
                    dpb->frame_idx = refList->frame_idx;
                    dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
                    dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
                    dpb++;
                    picParam->num_ref_frames++;
                }
            }
        }
    }
    return DECODE_SUCCESS;
}

void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
    // remove the current frame from DPB as it can't be decoded.
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
        int32_t poc = getPOC(&(picParam->CurrPic));
        for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
            if (poc == dpb->poc) {
                dpb->poc = (int32_t)POC_DEFAULT;
                if (dpb->surfaceBuffer) {
                    dpb->surfaceBuffer->asReferernce = false;
                }
                dpb->surfaceBuffer = NULL;
                break;
            }
        }
    }
}

int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
    if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
        return pic->BottomFieldOrderCnt;
    }
    return pic->TopFieldOrderCnt;
}

VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
    VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
    if (p == NULL) {
        ETRACE("Could not find surface for poc %d", getPOC(pic));
        return VA_INVALID_SURFACE;
    }
    return p->renderBuffer.surface;
}

VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    // ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    // always look for the latest entry in the DPB, in case reference frames share the same POC
    dpb += (DPB_SIZE - 1);
    for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

void VideoDecoderAVC::invalidateDPB(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        p->poc = (int32_t) POC_DEFAULT;
        p->surfaceBuffer = NULL;
        p++;
    }
}

void VideoDecoderAVC::clearAsReference(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        if (p->surfaceBuffer) {
            p->surfaceBuffer->asReferernce = false;
        }
        p++;
    }
}

Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
    int32_t DPBSize = getDPBSize(data);

    // Use the high profile for all H.264 profiles (baseline, main and high) except constrained baseline.
    VAProfile vaProfile = VAProfileH264High;

    // TODO: determine when to use VAProfileH264ConstrainedBaseline; set it only if we are told to do so
    if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
        data->codec_data->constraint_set1_flag == 1) {
        if (mErrorConcealment) {
            vaProfile = VAProfileH264ConstrainedBaseline;
        }
    }
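    // profile_idc 66 is Baseline; constraint_set1_flag == 1 additionally marks
    // the stream as Constrained Baseline (the subset that Main/High decoders
    // can handle), so VAProfileH264High remains a workable default and the
    // Constrained Baseline profile is only selected when error concealment is
    // explicitly requested.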

    VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
    updateFormatInfo(data);

    // for 1080p, limit the total number of surfaces to 19 according to the hardware limitation
    // (the earlier 19->10 workaround for memory shortage has been removed)
    if (mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
        DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
    }

    if (mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) {
        // When adaptive playback is enabled, turn off low delay mode.
        // Otherwise there may be a 240ms stutter if the output mode is changed from LowDelay to Delay.
        enableLowDelayMode(false);
    } else {
        // for the baseline profile, enable low delay mode automatically
        enableLowDelayMode(data->codec_data->profile_idc == 66);
    }

    return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
}

void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
    // new video size
    uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
    uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
            mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);

    if ((mVideoFormatInfo.width != width ||
         mVideoFormatInfo.height != height) &&
        width && height) {
        if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
            VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
            mSizeChanged = true;
            ITRACE("Video size is changed.");
        }
        mVideoFormatInfo.width = width;
        mVideoFormatInfo.height = height;
    }

    // video_range has a default value of 0.
    mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;

    switch (data->codec_data->matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; set to 0 so the color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }
    mVideoFormatInfo.aspectX = data->codec_data->sar_width;
    mVideoFormatInfo.aspectY = data->codec_data->sar_height;
    mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
    mVideoFormatInfo.cropRight = data->codec_data->crop_right;
    mVideoFormatInfo.cropTop = data->codec_data->crop_top;
    mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;

    ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
            data->codec_data->crop_left,
            data->codec_data->crop_top,
            data->codec_data->crop_right,
            data->codec_data->crop_bottom);

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
        mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    } else {
        // The number of buffers actually needed is
        // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (available buffer),
        // where outputQueue = DPB < 8 ? DPB : 8
        mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* owned by the native window */
                                              + data->codec_data->num_ref_frames
#ifndef USE_GEN_HW
                                              + HDMI_CONSUMED /* two extra buffers are needed for native window buffer cycling */
                                              + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
#endif
                                              + 1;
    }

    ITRACE("actualBufferNeeded = %d", mVideoFormatInfo.actualBufferNeeded);

    mVideoFormatInfo.valid = true;

    setRenderRect();
}

bool VideoDecoderAVC::isWiDiStatusChanged() {
#ifndef USE_GEN_HW
    if (mWiDiOn)
        return false;

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
        return false;

    if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
        return false;

    char prop[PROPERTY_VALUE_MAX];
    bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
                   (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
    if (widi_on) {
        mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
        mWiDiOn = true;
        ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
        return true;
    }
    return false;
#else
    return false;
#endif
}

Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
    updateFormatInfo(data);
    bool needFlush = false;
    bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);

    if (!rawDataMode) {
        needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                    || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
    }

    if (needFlush || (rawDataMode && mSizeChanged)) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    } else {
        return DECODE_SUCCESS;
    }
}

bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
    if (data->num_pictures == 0) {
        ETRACE("num_pictures == 0");
        return true;
    }

    vbp_picture_data_h264* picData = data->pic_data;
    if (picData->num_slices == 0) {
        ETRACE("num_slices == 0");
        return true;
    }

    bool newFrame = false;
    uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;

    if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
        // not the first slice; assume it is the continuation of a partial frame
        // TODO: check whether it is a new frame boundary, as the first slice may get lost in the streaming case.
        WTRACE("first_mb_in_slice != 0");
        if (!equalPTS) {
            // return true if the timestamp differs; this is a workaround for a streaming case
            WTRACE("different PTS, treat it as a new frame");
            return true;
        }
    } else {
        if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
            ETRACE("Current picture has both odd field and even field.");
        }
        // the current picture is a field or a frame and the buffer contains the first slice;
        // check whether the current picture and the last picture form an opposite-field pair
        if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
            // opposite field
            newFrame = false;
            WTRACE("current picture is not at a frame boundary.");
            mLastPictureFlags = 0;
        } else {
            newFrame = true;
            mLastPictureFlags = 0;
            for (uint32_t i = 0; i < data->num_pictures; i++) {
                mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
            }
            if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
                // the current buffer contains both odd field and even field.
                mLastPictureFlags = 0;
            }
        }
    }

    return newFrame;
}

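// Worked example (illustrative numbers): a level 4.1, 1920x1088 stream has
// maxDPB = 12288 (in units of 1024 bytes) and 120 x 68 = 8160 macroblocks per
// frame, so 12288 * 1024 / (8160 * 384) = 4 frames; after clamping against
// num_ref_frames and adding one slot for the current frame, the function
// returns at least 5.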
int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
    // 1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), capped at 16
    struct DPBTable {
        int32_t level;
        float maxDPB;
    } dpbTable[] = {
        {9,  148.5},
        {10, 148.5},
        {11, 337.5},
        {12, 891.0},
        {13, 891.0},
        {20, 891.0},
        {21, 1782.0},
        {22, 3037.5},
        {30, 3037.5},
        {31, 6750.0},
        {32, 7680.0},
        {40, 12288.0},
        {41, 12288.0},
        {42, 13056.0},
        {50, 41400.0},
        {51, 69120.0}
    };

    int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
    float maxDPB = 0;
    for (int32_t i = 0; i < count; i++) {
        if (dpbTable[i].level == data->codec_data->level_idc) {
            maxDPB = dpbTable[i].maxDPB;
            break;
        }
    }

    int32_t maxDPBSize = maxDPB * 1024 / (
        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
        384);

    if (maxDPBSize > 16) {
        maxDPBSize = 16;
    } else if (maxDPBSize == 0) {
        maxDPBSize = 3;
    }
    if (maxDPBSize < data->codec_data->num_ref_frames) {
        maxDPBSize = data->codec_data->num_ref_frames;
    }

    // add one extra slot for the current frame.
    maxDPBSize += 1;
    ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
    return maxDPBSize;
}

Decode_Status VideoDecoderAVC::checkHardwareCapability() {
#ifndef USE_GEN_HW
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("Hardware-supported resolution %d x %d is smaller than the clip resolution %d x %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
#endif
    return DECODE_SUCCESS;
}

#ifdef USE_AVC_SHORT_FORMAT
Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
        VAProfile profile, VAConfigID *config)
{
    VAStatus vaStatus;
    VAConfigAttrib attrib[2];

    if (config == NULL) {
        ETRACE("Invalid parameter!");
        return DECODE_FAIL;
    }

    attrib[0].type = VAConfigAttribRTFormat;
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].type = VAConfigAttribDecSliceMode;
    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;

    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);

    if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
        ITRACE("AVC short format used");
        attrib[1].value = VA_DEC_SLICE_MODE_BASE;
    } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
        ITRACE("AVC long format used");
        attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
    } else {
        ETRACE("Unsupported Decode Slice Mode!");
        return DECODE_FAIL;
    }

    vaStatus = vaCreateConfig(
        mVADisplay,
        profile,
        VAEntrypointVLD,
        &attrib[0],
        2,
        config);
    CHECK_VA_STATUS("vaCreateConfig");

    return DECODE_SUCCESS;
}
#endif