/*
 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VideoDecoderAVC.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <cutils/properties.h>

// Macros for actual buffer needed calculation
#define WIDI_CONSUMED   6
#define HDMI_CONSUMED   2
#define NW_CONSUMED     2
#define POC_DEFAULT     0x7FFFFFFF
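// POC_DEFAULT (INT32_MAX) marks an unused entry in the local DPB tables (see invalidateDPB()).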

VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_H264),
      mToggleDPB(0),
      mErrorConcealment(false) {

    invalidateDPB(0);
    invalidateDPB(1);
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

VideoDecoderAVC::~VideoDecoderAVC() {
    stop();
}

Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    // We don't want the base class to manage references.
    VideoDecoderBase::ManageReference(false);
    // output by picture order count
    VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);

    mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
            ITRACE("Use client-supplied profile and surface number to start VA.");
            return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
        }
        return DECODE_SUCCESS;
    }

    vbp_data_h264 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    status = startVA(data);
    return status;
}

void VideoDecoderAVC::stop(void) {
    // drop the last frame and ignore return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mErrorConcealment = false;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

void VideoDecoderAVC::flush(void) {
    // drop the frame and ignore return value
    VideoDecoderBase::flush();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_h264 *data = NULL;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (!mVAStarted) {
        if (data->has_sps && data->has_pps) {
            status = startVA(data);
            CHECK_STATUS("startVA");
        } else {
            WTRACE("Can't start VA as either SPS or PPS is still not available.");
            return DECODE_SUCCESS;
        }
    }

    VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);

    status = decodeFrame(buffer, data);
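    // If the parser found two frames packed into one input buffer, expose the offset and
    // timestamp of the second frame through the extension buffer so the caller can
    // resubmit the remaining data.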
    if (status == DECODE_MULTIPLE_FRAME) {
        buffer->ext = &mExtensionBuffer;
        mExtensionBuffer.extType = PACKED_FRAME_TYPE;
        mExtensionBuffer.extSize = sizeof(mPackedFrame);
        mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
    }
    return status;
}

Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    Decode_Status status;
    if (data->has_sps == 0 || data->has_pps == 0) {
        return DECODE_NO_CONFIG;
    }

    mVideoFormatInfo.flags = 0;
    uint32_t fieldFlags = 0;
    for (unsigned int i = 0; i < data->num_pictures; i++) {
        VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
        fieldFlags |= pic.flags;
        // Don't remove the following code; it can be enabled for debugging the DPB.
#if 0
        VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d,  reference = %d",
                i,
                buffer->timeStamp/1E6,
                pic.TopFieldOrderCnt,
                pic.BottomFieldOrderCnt,
                pic.flags,
                (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
                (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
#endif
    }
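    // If the pictures in this buffer carry only one of the two field flags, the access unit
    // holds a single field rather than a complete frame, so set IS_SINGLE_FIELD in the format info.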
    int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
    int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
    if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
        mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
    }

    if (data->new_sps || data->new_pps) {
        status = handleNewSequence(data);
        CHECK_STATUS("handleNewSequence");
    }

    if (isWiDiStatusChanged()) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    }

    // the first pic_data entry always exists; check whether any slice was parsed
    if (data->pic_data[0].num_slices == 0) {
        ITRACE("No slice available for decoding.");
        status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
        mSizeChanged = false;
        return status;
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;
    //if (lastPTS != mCurrentPTS) {
    if (isNewFrame(data, lastPTS == mCurrentPTS)) {
        if (mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            if (status != DECODE_SUCCESS) {
                Decode_Status st = status;
                // finish decoding the last frame if an error is encountered
                // while starting to decode the new frame
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
                return st;
            }
        }

        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        if (!mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            CHECK_STATUS("beginDecodingFrame");
        }
    } else {
        status = continueDecodingFrame(data);
        CHECK_STATUS("continueDecodingFrame");
    }

    // HAS_COMPLETE_FRAME is not reliable as it may indicate the end of a field
#if 0
    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }
#endif
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");
    VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        mAcquiredBuffer->referenceFrame = true;
    } else {
        mAcquiredBuffer->referenceFrame = false;
    }
    // set asReference in updateDPB

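    // Field-coded picture: mark the render buffer as interlaced (both fields are decoded
    // into the same surface); otherwise it is a progressive frame picture.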
    if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    // TODO: Set the discontinuity flag
    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    mAcquiredBuffer->pictureOrder = getPOC(picture);

    if (mSizeChanged) {
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
        mSizeChanged = false;
    }

    status = continueDecodingFrame(data);
    // surface buffer is released if decode fails
    return status;
}


Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;
    vbp_picture_data_h264 *picData = data->pic_data;

    // TODO: remove these debugging codes
    if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }
    for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
        // sanity check
        if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
            return DECODE_PARSER_FAIL;
        }

        if (picIndex > 0 &&
            (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
            // it is a packed frame buffer
            vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
            vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
            mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
            mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
            ITRACE("slice data offset = %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
            return DECODE_MULTIPLE_FRAME;
        }

        for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
            status = decodeSlice(data, picIndex, sliceIndex);
            if (status != DECODE_SUCCESS) {
                endDecodingFrame(true);
                // TODO: this is new code
                // remove current frame from DPB as it can't be decoded.
                removeReferenceFromDPB(picData->pic_parms);
                return status;
            }
        }
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates the start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the lost first slice
        }
        if (mDecodingFrame) {
            // interlaced content: complete decoding of the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");

            // for interlaced content, the top field POC may be valid only after the second field is parsed
            int32_t poc = getPOC(&(picParam->CurrPic));
            if (poc < mAcquiredBuffer->pictureOrder) {
                mAcquiredBuffer->pictureOrder = poc;
            }
        }

        // Check that there is no reference frame loss before decoding a frame

        // Update the reference frames and surface IDs for the DPB and the current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

#ifndef USE_AVC_SHORT_FORMAT
        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");
#endif
        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

#ifndef USE_AVC_SHORT_FORMAT

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#else
    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264Base),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#endif
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        sliceData->slice_size, //size
        1,        //num_elements
        sliceData->buffer_addr + sliceData->slice_offset,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
    int32_t numList = 1;
    // TODO: set numList to 0 if it is an I slice
    if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
        // B slice
        numList = 2;
    }

    int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
    VAPictureH264 *ref = sliceParam->RefPicList0;

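    // Walk RefPicList0 first and, for B slices, RefPicList1 as well, resolving each
    // parser-provided reference picture to the VA surface that holds its decoded data.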
    for (int32_t i = 0; i < numList; i++) {
        if (activeMinus1 >= REF_LIST_SIZE) {
            ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
            return DECODE_PARSER_FAIL;
        }
        for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
            if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
                ref->picture_id = findSurface(ref);
                if (ref->picture_id == VA_INVALID_SURFACE) {
                    // DecodeRefMissing is counted once even if multiple references are missing
                    mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
                    mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;

                    if (mLastReference) {
                        WTRACE("Reference frame %d is missing. Use last reference", getPOC(ref));
                        ref->picture_id = mLastReference->renderBuffer.surface;
                    } else {
                        ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
                        return DECODE_NO_REFERENCE;
                    }
                }
            }
        }
        activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
        ref = sliceParam->RefPicList1;
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
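    // Two DPB copies are kept in mDPBs[] and toggled on every frame: the new DPB is built
    // into the inactive copy while lookups (findSurface/findRefSurfaceBuffer) still read
    // the previous one; the toggle is flipped once the new DPB is complete.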
    clearAsReference(mToggleDPB);
    // pointer to the toggled (new) DPB
    DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
    VAPictureH264 *ref = picParam->ReferenceFrames;

    // update current picture ID
    picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;

    // build the new DPB
    for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
        if (ref->flags & VA_PICTURE_H264_INVALID) {
            continue;
        }
#ifdef USE_AVC_SHORT_FORMAT
        ref->picture_id = findSurface(ref);
#endif
        dpb->poc = getPOC(ref);
        // look for the latest reference frame in the DPB with the specified POC, in case frames share the same POC
        dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
        if (dpb->surfaceBuffer == NULL) {
            ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
            // DecodeRefMissing is counted once even if multiple references are missing
            mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
            mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
            if (dpb->poc == getPOC(&(picParam->CurrPic))) {
                WTRACE("updateDPB: Using the current picture for missing reference.");
                dpb->surfaceBuffer = mAcquiredBuffer;
            } else if (mLastReference) {
                WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder);
                // TODO: this is new code for error resilience
                dpb->surfaceBuffer = mLastReference;
            } else {
                WTRACE("updateDPB: Unable to recover the missing reference frame.");
                // continue building the DPB without advancing the dpb pointer;
                // this reference may not actually be used, which especially
                // happens after seeking to a non-IDR I frame.
                continue;
                //return DECODE_NO_REFERENCE;
            }
        }
        if (dpb->surfaceBuffer) {
            // this surface is used as a reference
            dpb->surfaceBuffer->asReferernce = true;
        }
        dpb++;
    }

    // add the current frame to the DPB if it is a reference frame
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        dpb->poc = getPOC(&(picParam->CurrPic));
        dpb->surfaceBuffer = mAcquiredBuffer;
        dpb->surfaceBuffer->asReferernce = true;
    }
    // invalidate the old DPB and switch to the newly built one
    invalidateDPB(mToggleDPB);
    mToggleDPB = !mToggleDPB;
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
    bool found = false;
    uint32_t flags = 0;
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = NULL;
    uint8_t activeMinus1 = 0;
    VAPictureH264 *refList = NULL;
    VAPictureH264 *dpb = picParam->ReferenceFrames;
    VAPictureH264 *refFrame = NULL;

    for (int i = 0; i < picParam->num_ref_frames; i++) {
        dpb->picture_id = findSurface(dpb);
        dpb++;
    }

    return DECODE_SUCCESS;

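    // NOTE: the early return above leaves the code below unreachable. The full DPB rebuild
    // from the per-slice reference lists is kept for reference but is currently disabled;
    // only the surface IDs of the parser-provided ReferenceFrames are patched in.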
    // invalidate the DPB in the picture parameter buffer
    memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
    picParam->num_ref_frames = 0;

    // update the DPB from the reference list in each slice.
    for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
        sliceParam = &(picData->slc_data[slice].slc_parms);

        for (int32_t list = 0; list < 2; list++) {
            refList = (list == 0) ? sliceParam->RefPicList0 :
                                    sliceParam->RefPicList1;
            activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
                                         sliceParam->num_ref_idx_l1_active_minus1;
            if (activeMinus1 >= REF_LIST_SIZE) {
                return DECODE_PARSER_FAIL;
            }
            for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
                if (refList->flags & VA_PICTURE_H264_INVALID) {
                    break;
                }
                found = false;
                refFrame = picParam->ReferenceFrames;
                for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
                    if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
                        // check for a complementary field
                        flags = refFrame->flags | refList->flags;
                        // if both TOP and BOTTOM are set, clear those flags
                        if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
                            refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
                        }
                        found = true;  // already in the DPB; will not add this one
                        break;
                    }
                }
                if (found == false) {
                    // add a new reference to the DPB
                    dpb->picture_id = findSurface(refList);
                    if (dpb->picture_id == VA_INVALID_SURFACE) {
                        if (mLastReference != NULL) {
                            dpb->picture_id = mLastReference->renderBuffer.surface;
                        } else {
                            ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList));
                            return DECODE_NO_REFERENCE;
                        }
                    }
                    dpb->flags = refList->flags;
                    // if it is a bottom field in the DPB, there must also be a top field in the DPB,
                    // so clear the bottom flag, or the VED will be confused when addressing the top field
                    if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
                        dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
                    dpb->frame_idx = refList->frame_idx;
                    dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
                    dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
                    dpb++;
                    picParam->num_ref_frames++;
                }
            }
        }
    }
    return DECODE_SUCCESS;
}

void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
    // remove the current frame from the DPB as it can't be decoded.
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
        int32_t poc = getPOC(&(picParam->CurrPic));
        for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
            if (poc == dpb->poc) {
                dpb->poc = (int32_t)POC_DEFAULT;
                if (dpb->surfaceBuffer) {
                    dpb->surfaceBuffer->asReferernce = false;
                }
                dpb->surfaceBuffer = NULL;
                break;
            }
        }
    }
}

int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
    if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
        return pic->BottomFieldOrderCnt;
    }
    return pic->TopFieldOrderCnt;
}

VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
    VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
    if (p == NULL) {
        ETRACE("Could not find surface for poc %d", getPOC(pic));
        return VA_INVALID_SURFACE;
    }
    return p->renderBuffer.surface;
}

VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove these debugging codes
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    // ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    // search backwards so the latest entry wins, in case reference frames share the same POC
    dpb += (DPB_SIZE - 1);
    for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove these debugging codes
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

void VideoDecoderAVC::invalidateDPB(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        p->poc = (int32_t) POC_DEFAULT;
        p->surfaceBuffer = NULL;
        p++;
    }
}

void VideoDecoderAVC::clearAsReference(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        if (p->surfaceBuffer) {
            p->surfaceBuffer->asReferernce = false;
        }
        p++;
    }
}

Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
    int32_t DPBSize = getDPBSize(data);

    // Use high profile for all kinds of H.264 profiles (baseline, main and high) except for constrained baseline
    VAProfile vaProfile = VAProfileH264High;

    if (mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) {
        // When adaptive playback is enabled, turn off low delay mode.
        // Otherwise there may be a 240ms stutter if the output mode is changed from LowDelay to Delay.
        enableLowDelayMode(false);
    } else {
        // for baseline profile or constrained high profile, enable low delay mode automatically
        enableLowDelayMode((data->codec_data->profile_idc == 66) || (data->codec_data->profile_idc == 100 && data->codec_data->constraint_set4_flag == 1 && data->codec_data->constraint_set5_flag == 1));
    }

    // TODO: determine when to use VAProfileH264ConstrainedBaseline; set it only if we are told to do so
    if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
        data->codec_data->constraint_set1_flag == 1) {
        if (mErrorConcealment) {
            vaProfile = VAProfileH264ConstrainedBaseline;
        }
    }

    VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
    updateFormatInfo(data);

    // for 1080p, limit the total surface count to 19, according to the hardware limitation
    // (the earlier workaround that reduced the maximum from 19 to 10 for memory shortage has been removed)
    if (mVideoFormatInfo.surfaceHeight == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
        DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
    }

    return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
}

void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
    // new video size
    uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
    uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;

    if (data->codec_data->crop_top > 0)
        height -= data->codec_data->crop_top;

    if (data->codec_data->crop_bottom > 0)
        height -= data->codec_data->crop_bottom;

    if (data->codec_data->crop_left > 0)
        width -= data->codec_data->crop_left;

    if (data->codec_data->crop_right > 0)
        width -= data->codec_data->crop_right;

    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
        mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);

    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        pthread_mutex_lock(&mFormatLock);
    }

    if ((mVideoFormatInfo.width != width ||
        mVideoFormatInfo.height != height) &&
        width && height) {
        if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
            VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
            mSizeChanged = true;
            ITRACE("Video size is changed.");
        }
        mVideoFormatInfo.width = width;
        mVideoFormatInfo.height = height;
    }

    // video_range has a default value of 0.
    mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;

    switch (data->codec_data->matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; set to 0 so the color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }
    mVideoFormatInfo.aspectX = data->codec_data->sar_width;
    mVideoFormatInfo.aspectY = data->codec_data->sar_height;
    mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
    mVideoFormatInfo.cropRight = data->codec_data->crop_right;
    mVideoFormatInfo.cropTop = data->codec_data->crop_top;
    mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;

    ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
        data->codec_data->crop_left,
        data->codec_data->crop_top,
        data->codec_data->crop_right,
        data->codec_data->crop_bottom);

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
        mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    } else {
        // The number of buffers actually needed is
        // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (available buffer),
        // where outputQueue = DPB < 8 ? DPB : 8
        mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */
                                              + data->codec_data->num_ref_frames
#ifndef USE_GEN_HW
                                              + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */
                                              + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
#endif
                                              + 1;
    }

    ITRACE("actualBufferNeeded = %d", mVideoFormatInfo.actualBufferNeeded);

    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        if (mSizeChanged
            || isWiDiStatusChanged()
            || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)) {
            mVideoFormatInfo.valid = false;
        } else {
            mVideoFormatInfo.valid = true;
        }

        pthread_mutex_unlock(&mFormatLock);
    } else {
        mVideoFormatInfo.valid = true;
    }

    setRenderRect();
    setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
}

bool VideoDecoderAVC::isWiDiStatusChanged() {
#ifndef USE_GEN_HW
    if (mWiDiOn)
        return false;

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
        return false;

    if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
        return false;

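    // WiDi status is latched: once mWiDiOn is set (or protection/raw mode is active), this
    // function keeps returning false, so the buffer budget is grown at most once.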
    char prop[PROPERTY_VALUE_MAX];
    bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
                    (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
    if (widi_on) {
        mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
        mWiDiOn = true;
        ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
        return true;
    }
    return false;
#else
    return false;
#endif
}

Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
    Decode_Status status;
    updateFormatInfo(data);

    bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);
    if (rawDataMode && mSizeChanged) {
        flushSurfaceBuffers();
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    }

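    // In native graphic buffer mode, a flush and DECODE_FORMAT_CHANGE are needed when the new
    // sequence no longer fits the allocated surfaces (size grew, WiDi turned on, or more
    // buffers are required than the client provided).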
    bool needFlush = false;
    if (!rawDataMode) {
        if (mStoreMetaData) {
            needFlush = mSizeChanged
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
        } else {
            needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                    || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
        }
    }

    if (needFlush) {
        if (mStoreMetaData) {
            status = endDecodingFrame(false);
            CHECK_STATUS("endDecodingFrame");
        } else {
            flushSurfaceBuffers();
        }
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    } else
        return DECODE_SUCCESS;
}

bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
    if (data->num_pictures == 0) {
        ETRACE("num_pictures == 0");
        return true;
    }

    vbp_picture_data_h264* picData = data->pic_data;
    if (picData->num_slices == 0) {
        ETRACE("num_slices == 0");
        return true;
    }

    bool newFrame = false;
    uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;

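    // A buffer starts a new frame when its first slice has first_mb_in_slice == 0 and the
    // picture does not pair up with the previous field; two consecutive field pictures whose
    // combined flags cover both TOP and BOTTOM form a complementary pair within one frame.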
    if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
        // not the first slice, assume it is a continuation of a partial frame
        // TODO: check if it is a new frame boundary, as the first slice may get lost in the streaming case.
        WTRACE("first_mb_in_slice != 0");
        if (!equalPTS) {
            // return true if the timestamp differs; this is a workaround for a streaming case
            WTRACE("different PTS, treat it as a new frame");
            return true;
        }
    } else {
        if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
            ETRACE("Current picture has both odd field and even field.");
        }
        // the current picture is a field or a frame, and the buffer contains the first slice;
        // check whether the current picture and the last picture form an opposite field pair
        if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
            // opposite field
            newFrame = false;
            WTRACE("current picture is not at frame boundary.");
            mLastPictureFlags = 0;
        } else {
            newFrame = true;
            mLastPictureFlags = 0;
            for (uint32_t i = 0; i < data->num_pictures; i++) {
                mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
            }
            if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
                // the current buffer contains both odd field and even field.
                mLastPictureFlags = 0;
            }
        }
    }

    return newFrame;
}

int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
    // DPB size in frames = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16)
    struct DPBTable {
        int32_t level;
        float maxDPB;
    } dpbTable[] = {
        {9,  148.5},
        {10, 148.5},
        {11, 337.5},
        {12, 891.0},
        {13, 891.0},
        {20, 891.0},
        {21, 1782.0},
        {22, 3037.5},
        {30, 3037.5},
        {31, 6750.0},
        {32, 7680.0},
        {40, 12288.0},
        {41, 12288.0},
        {42, 13056.0},
        {50, 41400.0},
        {51, 69120.0}
    };
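    // maxDPB values are the per-level MaxDPB limits in units of 1024 bytes (H.264 Annex A);
    // a decoded 4:2:0 frame occupies PicWidthInMbs * FrameHeightInMbs * 384 bytes.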

    int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
    float maxDPB = 0;
    for (int32_t i = 0; i < count; i++)
    {
        if (dpbTable[i].level == data->codec_data->level_idc) {
            maxDPB = dpbTable[i].maxDPB;
            break;
        }
    }

    int32_t maxDPBSize = maxDPB * 1024 / (
        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
        384);

    if (maxDPBSize > 16) {
        maxDPBSize = 16;
    } else if (maxDPBSize == 0) {
        maxDPBSize = 3;
    }
    if (maxDPBSize < data->codec_data->num_ref_frames) {
        maxDPBSize = data->codec_data->num_ref_frames;
    }

    // add one extra frame for current frame.
    maxDPBSize += 1;
    ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
    return maxDPBSize;
}

Decode_Status VideoDecoderAVC::checkHardwareCapability() {
#ifndef USE_GEN_HW
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
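    // Compare total pixel area against the driver-reported maximum; note that width and
    // height are not checked individually against their respective limits.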
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("hardware supported resolution %d * %d is smaller than the clip resolution %d * %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
#endif
    return DECODE_SUCCESS;
}

#ifdef USE_AVC_SHORT_FORMAT
Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
    VAProfile profile, VAConfigID *config)
{
    VAStatus vaStatus;
    VAConfigAttrib attrib[2];

    if (config == NULL) {
        ETRACE("Invalid parameter!");
        return DECODE_FAIL;
    }

    attrib[0].type = VAConfigAttribRTFormat;
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].type = VAConfigAttribDecSliceMode;
    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;

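    // Query the driver for the supported slice decode modes and prefer the base ("short
    // format") mode when it is available, falling back to the normal ("long format") mode.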
    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);

    if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
        ITRACE("AVC short format used");
        attrib[1].value = VA_DEC_SLICE_MODE_BASE;
    } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
        ITRACE("AVC long format used");
        attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
    } else {
        ETRACE("Unsupported Decode Slice Mode!");
        return DECODE_FAIL;
    }

    vaStatus = vaCreateConfig(
            mVADisplay,
            profile,
            VAEntrypointVLD,
            &attrib[0],
            2,
            config);
    CHECK_VA_STATUS("vaCreateConfig");

    return DECODE_SUCCESS;
}
#endif