/*
* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "VideoDecoderAVC.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <cutils/properties.h>

// Macros for actual buffer needed calculation
#define WIDI_CONSUMED 6
#define HDMI_CONSUMED 2
#define NW_CONSUMED 2
#define POC_DEFAULT 0x7FFFFFFF

#define MAX_PICTURE_WIDTH_AVC 4096
#define MAX_PICTURE_HEIGHT_AVC 4096

VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_H264),
      mToggleDPB(0),
      mErrorConcealment(false),
      mAdaptive(false) {

    invalidateDPB(0);
    invalidateDPB(1);
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

VideoDecoderAVC::~VideoDecoderAVC() {
    stop();
}

Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    // We don't want the base class to manage references.
    VideoDecoderBase::ManageReference(false);
    // output by picture order count
    VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);

    mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
            ITRACE("Using client-supplied profile and surface number to start VA.");
            return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
        }
        return DECODE_SUCCESS;
    }

    vbp_data_h264 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (data->codec_data->frame_width > MAX_PICTURE_WIDTH_AVC ||
        data->codec_data->frame_height > MAX_PICTURE_HEIGHT_AVC) {
        return DECODE_INVALID_DATA;
    }

    status = startVA(data);
    return status;
}

void VideoDecoderAVC::stop(void) {
    // drop the last frame and ignore the return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mErrorConcealment = false;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

void VideoDecoderAVC::flush(void) {
    // drop the pending frame and ignore the return value
    VideoDecoderBase::flush();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

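// decode() parses one access unit and decodes it. If the parser detects a second frame
// packed in the same input buffer, decodeFrame() returns DECODE_MULTIPLE_FRAME and
// decode() exposes mPackedFrame (offset and timestamp of the second frame) through
// buffer->ext. A caller-side sketch, with illustrative names only:
//
//     Decode_Status status = decoder->decode(buffer);
//     if (status == DECODE_MULTIPLE_FRAME) {
//         // buffer->ext->extData points at the packed-frame info;
//         // resubmit the input starting at the reported offset as a new buffer.
//     }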
Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_h264 *data = NULL;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (data->codec_data->frame_width > MAX_PICTURE_WIDTH_AVC ||
        data->codec_data->frame_height > MAX_PICTURE_HEIGHT_AVC) {
        return DECODE_INVALID_DATA;
    }

    if (!mVAStarted) {
        if (data->has_sps && data->has_pps) {
            status = startVA(data);
            CHECK_STATUS("startVA");
        } else {
            WTRACE("Can't start VA as SPS or PPS is not yet available.");
            return DECODE_SUCCESS;
        }
    }

    VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);

    status = decodeFrame(buffer, data);
    if (status == DECODE_MULTIPLE_FRAME) {
        buffer->ext = &mExtensionBuffer;
        mExtensionBuffer.extType = PACKED_FRAME_TYPE;
        mExtensionBuffer.extSize = sizeof(mPackedFrame);
        mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
    }
    return status;
}

Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    Decode_Status status;
    if (data->has_sps == 0 || data->has_pps == 0) {
        return DECODE_NO_CONFIG;
    }

    mVideoFormatInfo.flags = 0;
    uint32_t fieldFlags = 0;
    for (unsigned int i = 0; i < data->num_pictures; i++) {
        VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
        fieldFlags |= pic.flags;
        // Don't remove the following code; it can be enabled for debugging the DPB.
#if 0
        VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d",
                i,
                buffer->timeStamp/1E6,
                pic.TopFieldOrderCnt,
                pic.BottomFieldOrderCnt,
                pic.flags,
                (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
                (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
#endif
    }
    int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
    int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
    if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
        mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
    }

    if (data->new_sps || data->new_pps) {
        status = handleNewSequence(data);
        CHECK_STATUS("handleNewSequence");
    }

    if (isWiDiStatusChanged()) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    }

    // the first pic_data always exists; check if any slice is parsed
    if (data->pic_data[0].num_slices == 0) {
        ITRACE("No slice available for decoding.");
        status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
        mSizeChanged = false;
        return status;
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;
    //if (lastPTS != mCurrentPTS) {
    if (isNewFrame(data, lastPTS == mCurrentPTS)) {
        if (mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            if (status != DECODE_SUCCESS) {
                Decode_Status st = status;
                // finish decoding the last frame if an error is
                // encountered while starting the new frame
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
                return st;
            }
        }

        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        if (!mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            CHECK_STATUS("beginDecodingFrame");
        }
    } else {
        status = continueDecodingFrame(data);
        CHECK_STATUS("continueDecodingFrame");
    }

    // HAS_COMPLETE_FRAME is not reliable as it may indicate the end of a field
#if 0
    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding the current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }
#endif
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");
    VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        mAcquiredBuffer->referenceFrame = true;
    } else {
        mAcquiredBuffer->referenceFrame = false;
    }
    // asReference is set in updateDPB

    if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    // TODO: Set the discontinuity flag
    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    mAcquiredBuffer->pictureOrder = getPOC(picture);

    if (mSizeChanged) {
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
        mSizeChanged = false;
    }

    status = continueDecodingFrame(data);
    // the surface buffer is released if decoding fails
    return status;
}


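// continueDecodingFrame() submits every remaining picture/slice in the parsed data.
// If a picture after the first one carries no TOP/BOTTOM field flag, the input buffer
// actually packs two frames: the function records the byte offset right after the last
// slice of the previous picture in mPackedFrame and returns DECODE_MULTIPLE_FRAME so
// the caller can resubmit the second frame separately.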
Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;
    vbp_picture_data_h264 *picData = data->pic_data;

    // TODO: remove this debugging code
    if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }
    for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
        // sanity check
        if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
            return DECODE_PARSER_FAIL;
        }

        if (picIndex > 0 &&
            (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
            // it is a packed frame buffer
            vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
            vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
            mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
            mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
            ITRACE("slice data offset = %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
            return DECODE_MULTIPLE_FRAME;
        }

        for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
            status = decodeSlice(data, picIndex, sliceIndex);
            if (status != DECODE_SUCCESS) {
                endDecodingFrame(true);
                // TODO: this is new code
                // remove the current frame from the DPB as it can't be decoded.
                removeReferenceFromDPB(picData->pic_parms);
                return status;
            }
        }
    }
    return DECODE_SUCCESS;
}

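// decodeSlice() submits one slice to the driver. At the start of a new frame or field
// (first_mb_in_slice == 0, or no frame currently being decoded) it also ends the previous
// field's picture, rebuilds the DPB, calls vaBeginPicture() and queues the picture
// parameter and IQ matrix buffers; every slice then adds a slice parameter buffer and a
// slice data buffer before the single vaRenderPicture() call below.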
Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum of 4 buffers to render a slice: picture parameter, IQ matrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates the start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the lost first slice
        }
        if (mDecodingFrame) {
            // interlaced content: complete decoding of the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");

            // for interlaced content, the top field POC may be valid only after the second field is parsed
            int32_t poc = getPOC(&(picParam->CurrPic));
            if (poc < mAcquiredBuffer->pictureOrder) {
                mAcquiredBuffer->pictureOrder = poc;
            }
        }

        // Check that there is no reference frame loss before decoding a frame

        // Update the reference frames and surface IDs for the DPB and the current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

#ifndef USE_AVC_SHORT_FORMAT
        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");
#endif
        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

#ifndef USE_AVC_SHORT_FORMAT

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#else
    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264Base),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#endif
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        sliceData->slice_size, //size
        1, //num_elements
        sliceData->buffer_addr + sliceData->slice_offset,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}

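// setReference() walks RefPicList0 (and RefPicList1 for B slices) and replaces each
// entry's picture_id with the VA surface found in the DPB. If a reference surface is
// missing, the error is recorded once in errBuf and the last reference surface is
// substituted when available; otherwise decoding stops with DECODE_NO_REFERENCE.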
Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
    int32_t numList = 1;
    // TODO: set numList to 0 if it is an I slice
    if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
        // B slice
        numList = 2;
    }

    int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
    VAPictureH264 *ref = sliceParam->RefPicList0;

    for (int32_t i = 0; i < numList; i++) {
        if (activeMinus1 >= REF_LIST_SIZE) {
            ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
            return DECODE_PARSER_FAIL;
        }
        for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
            if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
                ref->picture_id = findSurface(ref);
                if (ref->picture_id == VA_INVALID_SURFACE) {
                    // DecodeRefMissing is counted once even if multiple references are missing
                    mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
                    mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;

                    if (mLastReference) {
                        WTRACE("Reference frame %d is missing. Use the last reference.", getPOC(ref));
                        ref->picture_id = mLastReference->renderBuffer.surface;
                    } else {
                        ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
                        return DECODE_NO_REFERENCE;
                    }
                }
            }
        }
        activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
        ref = sliceParam->RefPicList1;
    }
    return DECODE_SUCCESS;
}

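// updateDPB() maintains two DPB arrays selected by mToggleDPB. The new DPB is built in
// mDPBs[!mToggleDPB] from picParam->ReferenceFrames (resolving each POC to a surface
// buffer), the current picture is appended if it is marked as a reference, the old DPB
// in mDPBs[mToggleDPB] is invalidated, and the toggle is flipped so subsequent lookups
// see the fresh set.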
Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
    clearAsReference(mToggleDPB);
    // pointer to the toggled (new) DPB
    DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
    VAPictureH264 *ref = picParam->ReferenceFrames;

    // update the current picture ID
    picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;

    // build the new DPB
    for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
        if (ref->flags & VA_PICTURE_H264_INVALID) {
            continue;
        }
#ifdef USE_AVC_SHORT_FORMAT
        ref->picture_id = findSurface(ref);
#endif
        dpb->poc = getPOC(ref);
        // look for the latest ref frame in the DPB with the specified POC, in case frames share the same POC
        dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
        if (dpb->surfaceBuffer == NULL) {
            ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
            // DecodeRefMissing is counted once even if multiple references are missing
            mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
            mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
            if (dpb->poc == getPOC(&(picParam->CurrPic))) {
                WTRACE("updateDPB: Using the current picture for the missing reference.");
                dpb->surfaceBuffer = mAcquiredBuffer;
            } else if (mLastReference) {
                WTRACE("updateDPB: Using last reference frame %d for the missing reference.", mLastReference->pictureOrder);
                // TODO: this is new code for error resilience
                dpb->surfaceBuffer = mLastReference;
            } else {
                WTRACE("updateDPB: Unable to recover the missing reference frame.");
                // continue building the DPB without updating the dpb pointer;
                // this reference may not actually be used, especially after
                // seeking to a non-IDR I frame.
                continue;
                //return DECODE_NO_REFERENCE;
            }
        }
        if (dpb->surfaceBuffer) {
            // this surface is used as a reference
            dpb->surfaceBuffer->asReferernce = true;
        }
        dpb++;
    }

    // add the current frame to the DPB if it is a reference frame
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        dpb->poc = getPOC(&(picParam->CurrPic));
        dpb->surfaceBuffer = mAcquiredBuffer;
        dpb->surfaceBuffer->asReferernce = true;
    }
    // invalidate the currently used DPB
    invalidateDPB(mToggleDPB);
    mToggleDPB = !mToggleDPB;
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
    bool found = false;
    uint32_t flags = 0;
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = NULL;
    uint8_t activeMinus1 = 0;
    VAPictureH264 *refList = NULL;
    VAPictureH264 *dpb = picParam->ReferenceFrames;
    VAPictureH264 *refFrame = NULL;

    for (int i = 0; i < picParam->num_ref_frames; i++) {
        dpb->picture_id = findSurface(dpb);
        dpb++;
    }

    return DECODE_SUCCESS;

    // NOTE: the early return above makes the rest of this function unreachable;
    // it is kept here as the original DPB-rebuilding path.

    // invalidate the DPB in the picture buffer
    memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
    picParam->num_ref_frames = 0;

    // update the DPB from the reference list in each slice.
    for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
        sliceParam = &(picData->slc_data[slice].slc_parms);

        for (int32_t list = 0; list < 2; list++) {
            refList = (list == 0) ? sliceParam->RefPicList0 :
                                    sliceParam->RefPicList1;
            activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
                                         sliceParam->num_ref_idx_l1_active_minus1;
            if (activeMinus1 >= REF_LIST_SIZE) {
                return DECODE_PARSER_FAIL;
            }
            for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
                if (refList->flags & VA_PICTURE_H264_INVALID) {
                    break;
                }
                found = false;
                refFrame = picParam->ReferenceFrames;
                for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
                    if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
                        // check for a complementary field
                        flags = refFrame->flags | refList->flags;
                        // if both TOP and BOTTOM are set, clear the field flags
                        if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
                            refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
                        }
                        found = true;  // already in the DPB; will not add this one
                        break;
                    }
                }
                if (found == false) {
                    // add a new reference to the DPB
                    dpb->picture_id = findSurface(refList);
                    if (dpb->picture_id == VA_INVALID_SURFACE) {
                        if (mLastReference != NULL) {
                            dpb->picture_id = mLastReference->renderBuffer.surface;
                        } else {
                            ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList));
                            return DECODE_NO_REFERENCE;
                        }
                    }
                    dpb->flags = refList->flags;
                    // if it is a bottom field in the DPB, there must also be a top field in the DPB,
                    // so clear the bottom flag, or the VED will be confused when addressing the top field
                    if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
                        dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
                    dpb->frame_idx = refList->frame_idx;
                    dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
                    dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
                    dpb++;
                    picParam->num_ref_frames++;
                }
            }
        }
    }
    return DECODE_SUCCESS;
}

void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
    // remove the current frame from the DPB as it can't be decoded.
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
        int32_t poc = getPOC(&(picParam->CurrPic));
        for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
            if (poc == dpb->poc) {
                dpb->poc = (int32_t)POC_DEFAULT;
                if (dpb->surfaceBuffer) {
                    dpb->surfaceBuffer->asReferernce = false;
                }
                dpb->surfaceBuffer = NULL;
                break;
            }
        }
    }
}

int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
    if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
        return pic->BottomFieldOrderCnt;
    }
    return pic->TopFieldOrderCnt;
}

VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
    VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
    if (p == NULL) {
        ETRACE("Could not find surface for poc %d", getPOC(pic));
        return VA_INVALID_SURFACE;
    }
    return p->renderBuffer.surface;
}

VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    // ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

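// findRefSurfaceBuffer() is the same lookup as findSurfaceBuffer() but scans the DPB
// backwards, so when several entries share a POC the most recently added surface wins.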
VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    // always look for the latest entry in the DPB, in case reference frames share the same POC
    dpb += (DPB_SIZE - 1);
    for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

void VideoDecoderAVC::invalidateDPB(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        p->poc = (int32_t)POC_DEFAULT;
        p->surfaceBuffer = NULL;
        p++;
    }
}

void VideoDecoderAVC::clearAsReference(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        if (p->surfaceBuffer) {
            p->surfaceBuffer->asReferernce = false;
        }
        p++;
    }
}

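// startVA() derives the number of surfaces from the stream's level and resolution
// (getDPBSize) plus AVC_EXTRA_SURFACE_NUMBER, picks VAProfileH264High for all profiles
// (falling back to constrained baseline only when error concealment is requested for a
// constrained-baseline stream), and configures low-delay mode before setting up the
// VA context.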
Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
    int32_t DPBSize = getDPBSize(data);

    // Use high profile for all H.264 profiles (baseline, main and high) except constrained baseline
    VAProfile vaProfile = VAProfileH264High;

    if ((mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) || mAdaptive) {
        // When adaptive playback is enabled, turn off low delay mode.
        // Otherwise there may be a 240ms stutter if the output mode is changed from LowDelay to Delay.
        enableLowDelayMode(false);
    } else {
        // for baseline profile or constrained high profile, enable low delay mode automatically
        enableLowDelayMode((data->codec_data->profile_idc == 66) ||
                (data->codec_data->profile_idc == 100 &&
                 data->codec_data->constraint_set4_flag == 1 &&
                 data->codec_data->constraint_set5_flag == 1));
    }

    // TODO: determine when to use VAProfileH264ConstrainedBaseline; set it only if we are told to do so
    if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
        data->codec_data->constraint_set1_flag == 1) {
        if (mErrorConcealment) {
            vaProfile = VAProfileH264ConstrainedBaseline;
        }
    }

    VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
    updateFormatInfo(data);

    // for 1080p, limit the total number of surfaces to 19 according to the hardware limitation
    // (the earlier 19->10 workaround for memory shortage has been removed)
    if (mVideoFormatInfo.surfaceHeight == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
        DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
    }

    return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
}

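// updateFormatInfo() recomputes the display size from the macroblock dimensions minus
// the cropping rectangle and derives actualBufferNeeded. As a worked example of the
// formula below (values are illustrative only): with an output window of 8,
// NW_CONSUMED = 2, num_ref_frames = 4, HDMI_CONSUMED = 2 and WiDi off, the decoder
// asks for 8 + 2 + 4 + 2 + 0 + 1 = 17 buffers.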
void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
    // new video size
    uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
    uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;

    if (data->codec_data->crop_top > 0)
        height -= data->codec_data->crop_top;

    if (data->codec_data->crop_bottom > 0)
        height -= data->codec_data->crop_bottom;

    if (data->codec_data->crop_left > 0)
        width -= data->codec_data->crop_left;

    if (data->codec_data->crop_right > 0)
        width -= data->codec_data->crop_right;

    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
            mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);

    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        pthread_mutex_lock(&mFormatLock);
    }

    if ((mVideoFormatInfo.width != width ||
         mVideoFormatInfo.height != height) &&
        width && height) {
        if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
            VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
            mSizeChanged = true;
            mAdaptive = true;
            ITRACE("Video size is changed.");
        }
        mVideoFormatInfo.width = width;
        mVideoFormatInfo.height = height;
    }

    // video_range has a default value of 0.
    mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;

    switch (data->codec_data->matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; set to 0 so the color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }
    mVideoFormatInfo.aspectX = data->codec_data->sar_width;
    mVideoFormatInfo.aspectY = data->codec_data->sar_height;
    mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
    mVideoFormatInfo.cropRight = data->codec_data->crop_right;
    mVideoFormatInfo.cropTop = data->codec_data->crop_top;
    mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;

    ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
            data->codec_data->crop_left,
            data->codec_data->crop_top,
            data->codec_data->crop_right,
            data->codec_data->crop_bottom);

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
        mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    } else {
        // The actual number of buffers needed is
        // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (available buffer),
        // where outputQueue = DPB < 8 ? DPB : 8
        mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* owned by the native window */
                + data->codec_data->num_ref_frames
#ifndef USE_GEN_HW
                + HDMI_CONSUMED /* two extra buffers are needed for native window buffer cycling */
                + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
#endif
                + 1;
    }

    ITRACE("actualBufferNeeded = %d", mVideoFormatInfo.actualBufferNeeded);

    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        if (mSizeChanged
            || isWiDiStatusChanged()
            || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)) {
            mVideoFormatInfo.valid = false;
        } else {
            mVideoFormatInfo.valid = true;
        }

        pthread_mutex_unlock(&mFormatLock);
    } else {
        mVideoFormatInfo.valid = true;
    }

    setRenderRect();
    setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
}

bool VideoDecoderAVC::isWiDiStatusChanged() {
#ifndef USE_GEN_HW
    if (mWiDiOn)
        return false;

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
        return false;

    if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
        return false;

    char prop[PROPERTY_VALUE_MAX];
    bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
                   (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
    if (widi_on) {
        mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
        mWiDiOn = true;
        ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
        return true;
    }
    return false;
#else
    return false;
#endif
}

Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
    Decode_Status status;
    updateFormatInfo(data);

    bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);
    if (rawDataMode && mSizeChanged) {
        flushSurfaceBuffers();
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    }

    bool needFlush = false;
    if (!rawDataMode) {
        if (mStoreMetaData) {
            needFlush = mSizeChanged
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
        } else {
            needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                    || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
        }
    }

    if (needFlush) {
        if (mStoreMetaData) {
            status = endDecodingFrame(false);
            CHECK_STATUS("endDecodingFrame");
        } else {
            flushSurfaceBuffers();
        }
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    } else {
        return DECODE_SUCCESS;
    }
}

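// isNewFrame() decides whether the parsed buffer starts a new frame. A buffer whose
// first slice has first_mb_in_slice != 0 is treated as a continuation unless the
// timestamp changed. Otherwise, if the previous picture and the current one carry
// opposite field flags (one TOP, one BOTTOM), they form a field pair belonging to the
// same frame, so it is not a frame boundary; in all other cases the buffer starts a
// new frame and mLastPictureFlags is updated from the pictures it contains.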
bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
    if (data->num_pictures == 0) {
        ETRACE("num_pictures == 0");
        return true;
    }

    vbp_picture_data_h264* picData = data->pic_data;
    if (picData->num_slices == 0) {
        ETRACE("num_slices == 0");
        return true;
    }

    bool newFrame = false;
    uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;

    if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
        // not the first slice; assume it is the continuation of a partial frame
        // TODO: check whether it is a new frame boundary, as the first slice may get lost in the streaming case.
        WTRACE("first_mb_in_slice != 0");
        if (!equalPTS) {
            // return true on a different timestamp; this is a workaround for a streaming case
            WTRACE("different PTS, treat it as a new frame");
            return true;
        }
    } else {
        if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
            ETRACE("Current picture has both odd field and even field.");
        }
        // the current picture is a field or a frame and the buffer contains the first slice;
        // check if the current picture and the last picture form an opposite field pair
        if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
            // opposite field
            newFrame = false;
            WTRACE("current picture is not at a frame boundary.");
            mLastPictureFlags = 0;
        } else {
            newFrame = true;
            mLastPictureFlags = 0;
            for (uint32_t i = 0; i < data->num_pictures; i++) {
                mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
            }
            if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
                // the current buffer contains both the odd field and the even field.
                mLastPictureFlags = 0;
            }
        }
    }

    return newFrame;
}

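// getDPBSize() implements the H.264 spec bound
//   DPB size = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16)
// using the MaxDPB table below. Worked example (illustrative): for level 4.1
// (MaxDPB = 12288) at 1920x1088, i.e. 120 x 68 macroblocks,
// 12288 * 1024 / (120 * 68 * 384) = ~4.01, so the DPB size is 4; one extra surface
// is then added for the current frame.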
int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
    // DPB size = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16)
    struct DPBTable {
        int32_t level;
        float maxDPB;
    } dpbTable[] = {
        {9,  148.5},
        {10, 148.5},
        {11, 337.5},
        {12, 891.0},
        {13, 891.0},
        {20, 891.0},
        {21, 1782.0},
        {22, 3037.5},
        {30, 3037.5},
        {31, 6750.0},
        {32, 7680.0},
        {40, 12288.0},
        {41, 12288.0},
        {42, 13056.0},
        {50, 41400.0},
        {51, 69120.0}
    };

    int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
    float maxDPB = 0;
    for (int32_t i = 0; i < count; i++) {
        if (dpbTable[i].level == data->codec_data->level_idc) {
            maxDPB = dpbTable[i].maxDPB;
            break;
        }
    }

    int32_t maxDPBSize = maxDPB * 1024 / (
        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
        384);

    if (maxDPBSize > 16) {
        maxDPBSize = 16;
    } else if (maxDPBSize == 0) {
        maxDPBSize = 3;
    }
    if (maxDPBSize < data->codec_data->num_ref_frames) {
        maxDPBSize = data->codec_data->num_ref_frames;
    }

    // add one extra frame for the current frame.
    maxDPBSize += 1;
    ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
    return maxDPBSize;
}

Decode_Status VideoDecoderAVC::checkHardwareCapability() {
#ifndef USE_GEN_HW
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("The hardware-supported resolution %d * %d is smaller than the clip resolution %d * %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
#endif
    return DECODE_SUCCESS;
}

#ifdef USE_AVC_SHORT_FORMAT
Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
        VAProfile profile, VAConfigID *config)
{
    VAStatus vaStatus;
    VAConfigAttrib attrib[2];

    if (config == NULL) {
        ETRACE("Invalid parameter!");
        return DECODE_FAIL;
    }

    attrib[0].type = VAConfigAttribRTFormat;
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].type = VAConfigAttribDecSliceMode;
    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;

    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);

    if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
        ITRACE("AVC short format used");
        attrib[1].value = VA_DEC_SLICE_MODE_BASE;
    } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
        ITRACE("AVC long format used");
        attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
    } else {
        ETRACE("Unsupported Decode Slice Mode!");
        return DECODE_FAIL;
    }

    vaStatus = vaCreateConfig(
        mVADisplay,
        profile,
        VAEntrypointVLD,
        &attrib[0],
        2,
        config);
    CHECK_VA_STATUS("vaCreateConfig");

    return DECODE_SUCCESS;
}
#endif
