/*
 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <va/va.h>
#include "VideoDecoderBase.h"
#include "VideoDecoderAVC.h"
#include "VideoDecoderTrace.h"
#include "vbp_loader.h"
#include "VideoDecoderAVCSecure.h"
#include "VideoFrameInfo.h"

#include <string.h>

#define MAX_SLICEHEADER_BUFFER_SIZE 4096
#define STARTCODE_PREFIX_LEN 3
#define NALU_TYPE_MASK 0x1F
#define MAX_NALU_HEADER_BUFFER 8192
static const uint8_t startcodePrefix[STARTCODE_PREFIX_LEN] = {0x00, 0x00, 0x01};

/* H264 start code values */
typedef enum _h264_nal_unit_type
{
    h264_NAL_UNIT_TYPE_unspecified = 0,
    h264_NAL_UNIT_TYPE_SLICE,
    h264_NAL_UNIT_TYPE_DPA,
    h264_NAL_UNIT_TYPE_DPB,
    h264_NAL_UNIT_TYPE_DPC,
    h264_NAL_UNIT_TYPE_IDR,
    h264_NAL_UNIT_TYPE_SEI,
    h264_NAL_UNIT_TYPE_SPS,
    h264_NAL_UNIT_TYPE_PPS,
    h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
    h264_NAL_UNIT_TYPE_EOSeq,
    h264_NAL_UNIT_TYPE_EOstream,
    h264_NAL_UNIT_TYPE_filler_data,
    h264_NAL_UNIT_TYPE_SPS_extension,
    h264_NAL_UNIT_TYPE_ACP = 19,
    h264_NAL_UNIT_TYPE_Slice_extension = 20
} h264_nal_unit_type_t;

VideoDecoderAVCSecure::VideoDecoderAVCSecure(const char *mimeType)
    : VideoDecoderAVC(mimeType) {
    mFrameSize = 0;
    mFrameData = NULL;
    mIsEncryptData = 0;
    mClearData = NULL;
    mCachedHeader = NULL;
    setParserType(VBP_H264SECURE);
    mFrameIdx = 0;
    mModularMode = 0;
    mSliceNum = 0;
}

Decode_Status VideoDecoderAVCSecure::start(VideoConfigBuffer *buffer) {
    VTRACE("VideoDecoderAVCSecure::start");

    Decode_Status status = VideoDecoderAVC::start(buffer);
    if (status != DECODE_SUCCESS) {
        return status;
    }

    mClearData = new uint8_t [MAX_NALU_HEADER_BUFFER];
    if (mClearData == NULL) {
        ETRACE("Failed to allocate memory for mClearData");
        return DECODE_MEMORY_FAIL;
    }

    mCachedHeader = new uint8_t [MAX_SLICEHEADER_BUFFER_SIZE];
    if (mCachedHeader == NULL) {
        ETRACE("Failed to allocate memory for mCachedHeader");
        return DECODE_MEMORY_FAIL;
    }

    return status;
}

void VideoDecoderAVCSecure::stop(void) {
    VTRACE("VideoDecoderAVCSecure::stop");
    VideoDecoderAVC::stop();

    if (mClearData) {
        delete [] mClearData;
        mClearData = NULL;
    }

    if (mCachedHeader) {
        delete [] mCachedHeader;
        mCachedHeader = NULL;
    }
}
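
// Modular (subsample-encryption) input path: the secure buffer carries a frame_info_t
// header describing every NALU in the frame. Slice NALUs are recorded in mSliceInfo,
// with their start offsets rounded down and sizes rounded up to 16-byte boundaries
// (presumably to satisfy the hardware's alignment requirement), while clear SPS/PPS
// NALUs are copied into mClearData and handed to the software parser.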
Decode_Status VideoDecoderAVCSecure::processModularInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data)
{
    VTRACE("processModularInputBuffer +++");
    Decode_Status status;
    int32_t clear_data_size = 0;
    uint8_t *clear_data = NULL;

    int32_t nalu_num = 0;
    uint8_t nalu_type = 0;
    int32_t nalu_offset = 0;
    uint32_t nalu_size = 0;
    uint8_t naluType = 0;
    uint8_t *nalu_data = NULL;
    uint32_t sliceidx = 0;

    frame_info_t *pFrameInfo = NULL;
    mSliceNum = 0;
    memset(&mSliceInfo, 0, sizeof(mSliceInfo));
    mIsEncryptData = 0;

    if (buffer->flag & IS_SECURE_DATA) {
        VTRACE("Decoding protected video ...");
        pFrameInfo = (frame_info_t *) buffer->data;
        if (pFrameInfo == NULL) {
            ETRACE("Invalid parameter: pFrameInfo is NULL!");
            return DECODE_MEMORY_FAIL;
        }

        mFrameData = (uint8_t *)pFrameInfo + pFrameInfo->data_offset_from_frameinfo;
        mFrameSize = pFrameInfo->size;
        VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize);

        nalu_num = pFrameInfo->num_nalus;
        VTRACE("nalu_num = %d", nalu_num);

        if (nalu_num <= 0 || nalu_num >= MAX_NUM_NALUS) {
            ETRACE("Invalid parameter: nalu_num = %d", nalu_num);
            return DECODE_MEMORY_FAIL;
        }

        for (int32_t i = 0; i < nalu_num; i++) {

            nalu_size = pFrameInfo->nalus[i].length;
            nalu_type = pFrameInfo->nalus[i].type;
            nalu_offset = pFrameInfo->nalus[i].offset;
            nalu_data = ((uint8_t *)pFrameInfo) + pFrameInfo->nalus[i].data_offset_from_frameinfo;
            naluType = nalu_type & NALU_TYPE_MASK;

            VTRACE("nalu_type = 0x%x, nalu_size = %d, nalu_offset = 0x%x", nalu_type, nalu_size, nalu_offset);

            // FIXME: this is a workaround for the case where two frames' data are wrongly
            // packed into one buffer, especially IDR + slice. Let it quit gracefully.
            if ((naluType == h264_NAL_UNIT_TYPE_SLICE) && (i > 0)) {
                uint8_t former_naluType = pFrameInfo->nalus[i-1].type & NALU_TYPE_MASK;
                if (former_naluType == h264_NAL_UNIT_TYPE_IDR) {
                    ETRACE("Invalid parameter: IDR slice + SLICE in one buffer");
                    break; // abandon this slice
                }
            }

            if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {

                mIsEncryptData = 1;
                VTRACE("slice idx = %d", sliceidx);
                mSliceInfo[sliceidx].sliceHeaderByte = nalu_type;
                mSliceInfo[sliceidx].sliceStartOffset = (nalu_offset >> 4) << 4;
                mSliceInfo[sliceidx].sliceByteOffset = nalu_offset - mSliceInfo[sliceidx].sliceStartOffset;
                mSliceInfo[sliceidx].sliceLength = mSliceInfo[sliceidx].sliceByteOffset + nalu_size;
                mSliceInfo[sliceidx].sliceSize = (mSliceInfo[sliceidx].sliceByteOffset + nalu_size + 0xF) & ~0xF;
                VTRACE("sliceHeaderByte = 0x%x", mSliceInfo[sliceidx].sliceHeaderByte);
                VTRACE("sliceStartOffset = %d", mSliceInfo[sliceidx].sliceStartOffset);
                VTRACE("sliceByteOffset = %d", mSliceInfo[sliceidx].sliceByteOffset);
                VTRACE("sliceSize = %d", mSliceInfo[sliceidx].sliceSize);
                VTRACE("sliceLength = %d", mSliceInfo[sliceidx].sliceLength);

#if 0
                uint32_t testsize;
                uint8_t *testdata;
                testsize = mSliceInfo[sliceidx].sliceSize > 64 ? 64 : mSliceInfo[sliceidx].sliceSize;
                testdata = (uint8_t *)(mFrameData);
                for (int i = 0; i < testsize; i++) {
                    VTRACE("testdata[%d] = 0x%x", i, testdata[i]);
                }
#endif
                sliceidx++;

            } else if (naluType == h264_NAL_UNIT_TYPE_SPS || naluType == h264_NAL_UNIT_TYPE_PPS) {
                if (nalu_data == NULL) {
                    ETRACE("Invalid parameter: nalu_data = NULL for naluType 0x%x", naluType);
                    return DECODE_MEMORY_FAIL;
                }
                memcpy(mClearData + clear_data_size,
                    nalu_data,
                    nalu_size);
                clear_data_size += nalu_size;
            } else {
                ITRACE("Nalu type = 0x%x is skipped", naluType);
                continue;
            }
        }
        clear_data = mClearData;
        mSliceNum = sliceidx;

    } else {
        VTRACE("Decoding clear video ...");
        mIsEncryptData = 0;
        mFrameSize = buffer->size;
        mFrameData = buffer->data;
        clear_data = buffer->data;
        clear_data_size = buffer->size;
    }

    if (clear_data_size > 0) {
        status = VideoDecoderBase::parseBuffer(
                clear_data,
                clear_data_size,
                false,
                (void**)data);
        CHECK_STATUS("VideoDecoderBase::parseBuffer");
    } else {
        status = VideoDecoderBase::queryBuffer((void**)data);
        CHECK_STATUS("VideoDecoderBase::queryBuffer");
    }
    return DECODE_SUCCESS;
}

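// Classic (non-modular) input path. The clear NALU metadata is appended after the
// encrypted payload at buffer->data + buffer->size: one 32-bit word that is not read
// here, a 32-bit NALU count, and then one record per NALU made of three 32-bit words
// (only the third, the NALU length, is used; it is rounded up to a 4-byte boundary)
// followed by the NALU bytes. Only SEI/SPS/PPS NALUs are accepted in the clear; a
// slice NALU in this metadata is treated as invalid data.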
Decode_Status VideoDecoderAVCSecure::processClassicInputBuffer(VideoDecodeBuffer *buffer, vbp_data_h264 **data)
{
    Decode_Status status;
    int32_t clear_data_size = 0;
    uint8_t *clear_data = NULL;
    uint8_t naluType = 0;

    int32_t num_nalus;
    int32_t offset;
    uint8_t *data_src;
    uint8_t *nalu_data;
    uint32_t nalu_size;

    if (buffer->flag & IS_SECURE_DATA) {
        VTRACE("Decoding protected video ...");
        mIsEncryptData = 1;

        mFrameData = buffer->data;
        mFrameSize = buffer->size;
        VTRACE("mFrameData = %p, mFrameSize = %d", mFrameData, mFrameSize);
        num_nalus = *(uint32_t *)(buffer->data + buffer->size + sizeof(uint32_t));
        VTRACE("num_nalus = %d", num_nalus);
        offset = 4;
        for (int32_t i = 0; i < num_nalus; i++) {
            VTRACE("%d nalu, offset = %d", i, offset);
            data_src = buffer->data + buffer->size + sizeof(uint32_t) + offset;
            nalu_size = *(uint32_t *)(data_src + 2 * sizeof(uint32_t));
            nalu_size = (nalu_size + 0x03) & (~0x03);

            nalu_data = data_src + 3 * sizeof(uint32_t);
            naluType = nalu_data[0] & NALU_TYPE_MASK;
            offset += nalu_size + 3 * sizeof(uint32_t);
            VTRACE("naluType = 0x%x", naluType);
            VTRACE("nalu_size = %d, nalu_data = %p", nalu_size, nalu_data);

            if (naluType >= h264_NAL_UNIT_TYPE_SLICE && naluType <= h264_NAL_UNIT_TYPE_IDR) {
                ETRACE("Slice NALU received!");
                return DECODE_INVALID_DATA;
            }

            else if (naluType >= h264_NAL_UNIT_TYPE_SEI && naluType <= h264_NAL_UNIT_TYPE_PPS) {
                memcpy(mClearData + clear_data_size,
                    startcodePrefix,
                    STARTCODE_PREFIX_LEN);
                clear_data_size += STARTCODE_PREFIX_LEN;
                memcpy(mClearData + clear_data_size,
                    nalu_data,
                    nalu_size);
                clear_data_size += nalu_size;
            } else {
                ETRACE("Failure: DECODE_FRAME_DROPPED");
                return DECODE_FRAME_DROPPED;
            }
        }
        clear_data = mClearData;
    } else {
        VTRACE("Decoding clear video ...");
        mIsEncryptData = 0;
        mFrameSize = buffer->size;
        mFrameData = buffer->data;
        clear_data = buffer->data;
        clear_data_size = buffer->size;
    }

    if (clear_data_size > 0) {
        status = VideoDecoderBase::parseBuffer(
                clear_data,
                clear_data_size,
                false,
                (void**)data);
        CHECK_STATUS("VideoDecoderBase::parseBuffer");
    } else {
        status = VideoDecoderBase::queryBuffer((void**)data);
        CHECK_STATUS("VideoDecoderBase::queryBuffer");
    }
    return DECODE_SUCCESS;
}

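// Top-level decode entry point. An IS_SUBSAMPLE_ENCRYPTION buffer latches the decoder
// into modular mode; the input is pre-processed by the matching helper above, VA is
// started once both SPS and PPS have been seen, and the frame is then decoded.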
Decode_Status VideoDecoderAVCSecure::decode(VideoDecodeBuffer *buffer) {
    VTRACE("VideoDecoderAVCSecure::decode");
    Decode_Status status;
    vbp_data_h264 *data = NULL;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }

#if 0
    uint32_t testsize;
    uint8_t *testdata;
    testsize = buffer->size > 16 ? 16 : buffer->size;
    testdata = (uint8_t *)(buffer->data);
    for (uint32_t i = 0; i < testsize; i++) {
        VTRACE("testdata[%d] = 0x%x", i, testdata[i]);
    }
#endif
    if (buffer->flag & IS_SUBSAMPLE_ENCRYPTION) {
        mModularMode = 1;
    }

    if (mModularMode) {
        status = processModularInputBuffer(buffer, &data);
        CHECK_STATUS("processModularInputBuffer");
    }
    else {
        status = processClassicInputBuffer(buffer, &data);
        CHECK_STATUS("processClassicInputBuffer");
    }

    if (!mVAStarted) {
        if (data->has_sps && data->has_pps) {
            status = startVA(data);
            CHECK_STATUS("startVA");
        } else {
            WTRACE("Can't start VA as either SPS or PPS is still not available.");
            return DECODE_SUCCESS;
        }
    }

    status = decodeFrame(buffer, data);

    return status;
}

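// Decodes one access unit that has already been pre-processed into 'data': handles
// SPS/PPS changes, acquires an output surface, parses the slice headers (the modular
// parse is attempted a second time if the first pass fails), then begins decoding the
// new frame and finishes the outstanding one. Interlaced content is not supported.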
Decode_Status VideoDecoderAVCSecure::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    VTRACE("VideoDecoderAVCSecure::decodeFrame");
    Decode_Status status;
    VTRACE("data->has_sps = %d, data->has_pps = %d", data->has_sps, data->has_pps);

#if 0
    // Don't remove the following code; it can be enabled for debugging the DPB.
    for (unsigned int i = 0; i < data->num_pictures; i++) {
        VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
        VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d, reference = %d",
                i,
                buffer->timeStamp/1E6,
                pic.TopFieldOrderCnt,
                pic.BottomFieldOrderCnt,
                pic.flags,
                (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
                (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
    }
#endif

    if (data->new_sps || data->new_pps) {
        status = handleNewSequence(data);
        CHECK_STATUS("handleNewSequence");
    }

    if (mModularMode && (!mIsEncryptData)) {
        if (data->pic_data[0].num_slices == 0) {
            ITRACE("No slice available for decoding.");
            status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
            mSizeChanged = false;
            return status;
        }
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;

    // start decoding a new frame
    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");

    if (mModularMode) {
        status = parseModularSliceHeader(data);
        if (status != DECODE_SUCCESS)
            status = parseModularSliceHeader(data);
    }
    else {
        status = parseClassicSliceHeader(data);
    }

    if (status != DECODE_SUCCESS) {
        endDecodingFrame(true);
        if (status == DECODE_PARSER_FAIL) {
            ETRACE("parse frame failed with DECODE_PARSER_FAIL");
            status = DECODE_INVALID_DATA;
        }
        return status;
    }

    status = beginDecodingFrame(data);
    CHECK_STATUS("beginDecodingFrame");

    // finish decoding the last frame
    status = endDecodingFrame(false);
    CHECK_STATUS("endDecodingFrame");

    if (isNewFrame(data, lastPTS == mCurrentPTS) == 0) {
        ETRACE("Can't handle interlaced frames yet");
        return DECODE_FAIL;
    }

    return DECODE_SUCCESS;
}

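// Marks the acquired surface as reference or non-reference, records the scan format,
// timestamp and picture order count of the new frame, then continues with per-slice
// decoding.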
Decode_Status VideoDecoderAVCSecure::beginDecodingFrame(vbp_data_h264 *data) {
    VTRACE("VideoDecoderAVCSecure::beginDecodingFrame");
    Decode_Status status;
    VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        mAcquiredBuffer->referenceFrame = true;
    } else {
        mAcquiredBuffer->referenceFrame = false;
    }

    if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    mAcquiredBuffer->pictureOrder = getPOC(picture);

    if (mSizeChanged) {
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
        mSizeChanged = false;
    }

    status = continueDecodingFrame(data);
    return status;
}

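// Submits every slice of every picture in 'data' to the driver. A failure on any slice
// aborts the frame and removes it from the DPB so it is not used as a reference later.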
Decode_Status VideoDecoderAVCSecure::continueDecodingFrame(vbp_data_h264 *data) {
    VTRACE("VideoDecoderAVCSecure::continueDecodingFrame");
    Decode_Status status;
    vbp_picture_data_h264 *picData = data->pic_data;

    if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }
    VTRACE("data->num_pictures = %d", data->num_pictures);
    for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
        if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
            return DECODE_PARSER_FAIL;
        }

        if (picIndex > 0 &&
            (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
            ETRACE("Packed frame is not supported yet!");
            return DECODE_FAIL;
        }
        VTRACE("picData->num_slices = %d", picData->num_slices);
        for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
            status = decodeSlice(data, picIndex, sliceIndex);
            if (status != DECODE_SUCCESS) {
                endDecodingFrame(true);
                // remove current frame from DPB as it can't be decoded.
                removeReferenceFromDPB(picData->pic_parms);
                return status;
            }
        }
    }
    mDecodingFrame = true;

    return DECODE_SUCCESS;
}

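// Classic protected path: asks the hardware to parse the encrypted slice headers. The
// whole frame is submitted as a single VASliceData buffer together with a
// VAParsePictureParameter buffer; the parsed headers come back through the slice header
// group buffer and are folded into 'data' by updateSliceParameter().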
Decode_Status VideoDecoderAVCSecure::parseClassicSliceHeader(vbp_data_h264 *data) {
    Decode_Status status;
    VAStatus vaStatus;

    VABufferID sliceheaderbufferID;
    VABufferID pictureparameterparsingbufferID;
    VABufferID mSlicebufferID;

    if (mFrameSize <= 0) {
        return DECODE_SUCCESS;
    }
    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    CHECK_VA_STATUS("vaBeginPicture");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VAParseSliceHeaderGroupBufferType,
        MAX_SLICEHEADER_BUFFER_SIZE,
        1,
        NULL,
        &sliceheaderbufferID);
    CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer");

    void *sliceheaderbuf;
    vaStatus = vaMapBuffer(
        mVADisplay,
        sliceheaderbufferID,
        &sliceheaderbuf);
    CHECK_VA_STATUS("vaMapBuffer");

    memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE);

    vaStatus = vaUnmapBuffer(
        mVADisplay,
        sliceheaderbufferID);
    CHECK_VA_STATUS("vaUnmapBuffer");


    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        mFrameSize, //size
        1, //num_elements
        mFrameData,
        &mSlicebufferID);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");

    data->pic_parse_buffer->frame_buf_id = mSlicebufferID;
    data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID;
    data->pic_parse_buffer->frame_size = mFrameSize;
    data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE;

#if 0

    VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag);
    VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag);
    VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag);
    VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag);
    VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag);
    VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag);
    VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag);
    VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc);

    VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id);
    VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1);
    VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc);
    VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4);
    VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type);
    VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag);
    VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1);
    VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1);
#endif

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VAParsePictureParameterBufferType,
        sizeof(VAParsePictureParameterBuffer),
        1,
        data->pic_parse_buffer,
        &pictureparameterparsingbufferID);
    CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer");

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        &pictureparameterparsingbufferID,
        1);
    CHECK_VA_STATUS("vaRenderPicture");

    vaStatus = vaMapBuffer(
        mVADisplay,
        sliceheaderbufferID,
        &sliceheaderbuf);
    CHECK_VA_STATUS("vaMapBuffer");

    status = updateSliceParameter(data, sliceheaderbuf);
    CHECK_STATUS("processSliceHeader");

    vaStatus = vaUnmapBuffer(
        mVADisplay,
        sliceheaderbufferID);
    CHECK_VA_STATUS("vaUnmapBuffer");

    return DECODE_SUCCESS;
}

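// Modular protected path: same idea as parseClassicSliceHeader(), but each slice
// recorded in mSliceInfo is submitted individually. The parsed headers are accumulated
// into mCachedHeader (terminated with 0xFF padding) before being folded into 'data'.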
Decode_Status VideoDecoderAVCSecure::parseModularSliceHeader(vbp_data_h264 *data) {
    Decode_Status status;
    VAStatus vaStatus;

    VABufferID sliceheaderbufferID;
    VABufferID pictureparameterparsingbufferID;
    VABufferID mSlicebufferID;
    int32_t sliceIdx;

    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    CHECK_VA_STATUS("vaBeginPicture");

    if (mFrameSize <= 0 || mSliceNum <= 0) {
        return DECODE_SUCCESS;
    }
    void *sliceheaderbuf;
    memset(mCachedHeader, 0, MAX_SLICEHEADER_BUFFER_SIZE);
    int32_t offset = 0;
    int32_t size = 0;

    for (sliceIdx = 0; sliceIdx < mSliceNum; sliceIdx++) {
        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAParseSliceHeaderGroupBufferType,
            MAX_SLICEHEADER_BUFFER_SIZE,
            1,
            NULL,
            &sliceheaderbufferID);
        CHECK_VA_STATUS("vaCreateSliceHeaderGroupBuffer");

        vaStatus = vaMapBuffer(
            mVADisplay,
            sliceheaderbufferID,
            &sliceheaderbuf);
        CHECK_VA_STATUS("vaMapBuffer");

        memset(sliceheaderbuf, 0, MAX_SLICEHEADER_BUFFER_SIZE);

        vaStatus = vaUnmapBuffer(
            mVADisplay,
            sliceheaderbufferID);
        CHECK_VA_STATUS("vaUnmapBuffer");

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceDataBufferType,
            mSliceInfo[sliceIdx].sliceSize, //size
            1, //num_elements
            mFrameData + mSliceInfo[sliceIdx].sliceStartOffset,
            &mSlicebufferID);
        CHECK_VA_STATUS("vaCreateSliceDataBuffer");

        data->pic_parse_buffer->frame_buf_id = mSlicebufferID;
        data->pic_parse_buffer->slice_headers_buf_id = sliceheaderbufferID;
        data->pic_parse_buffer->frame_size = mSliceInfo[sliceIdx].sliceLength;
        data->pic_parse_buffer->slice_headers_size = MAX_SLICEHEADER_BUFFER_SIZE;
        data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte;
        data->pic_parse_buffer->slice_offset = mSliceInfo[sliceIdx].sliceByteOffset;

#if 0
        VTRACE("data->pic_parse_buffer->slice_offset = 0x%x", data->pic_parse_buffer->slice_offset);
        VTRACE("pic_parse_buffer->nalu_header.value = %x", data->pic_parse_buffer->nalu_header.value = mSliceInfo[sliceIdx].sliceHeaderByte);
        VTRACE("flags.bits.frame_mbs_only_flag = %d", data->pic_parse_buffer->flags.bits.frame_mbs_only_flag);
        VTRACE("flags.bits.pic_order_present_flag = %d", data->pic_parse_buffer->flags.bits.pic_order_present_flag);
        VTRACE("flags.bits.delta_pic_order_always_zero_flag = %d", data->pic_parse_buffer->flags.bits.delta_pic_order_always_zero_flag);
        VTRACE("flags.bits.redundant_pic_cnt_present_flag = %d", data->pic_parse_buffer->flags.bits.redundant_pic_cnt_present_flag);
        VTRACE("flags.bits.weighted_pred_flag = %d", data->pic_parse_buffer->flags.bits.weighted_pred_flag);
        VTRACE("flags.bits.entropy_coding_mode_flag = %d", data->pic_parse_buffer->flags.bits.entropy_coding_mode_flag);
        VTRACE("flags.bits.deblocking_filter_control_present_flag = %d", data->pic_parse_buffer->flags.bits.deblocking_filter_control_present_flag);
        VTRACE("flags.bits.weighted_bipred_idc = %d", data->pic_parse_buffer->flags.bits.weighted_bipred_idc);
        VTRACE("pic_parse_buffer->expected_pic_parameter_set_id = %d", data->pic_parse_buffer->expected_pic_parameter_set_id);
        VTRACE("pic_parse_buffer->num_slice_groups_minus1 = %d", data->pic_parse_buffer->num_slice_groups_minus1);
        VTRACE("pic_parse_buffer->chroma_format_idc = %d", data->pic_parse_buffer->chroma_format_idc);
        VTRACE("pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4 = %d", data->pic_parse_buffer->log2_max_pic_order_cnt_lsb_minus4);
        VTRACE("pic_parse_buffer->pic_order_cnt_type = %d", data->pic_parse_buffer->pic_order_cnt_type);
        VTRACE("pic_parse_buffer->residual_colour_transform_flag = %d", data->pic_parse_buffer->residual_colour_transform_flag);
        VTRACE("pic_parse_buffer->num_ref_idc_l0_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l0_active_minus1);
        VTRACE("pic_parse_buffer->num_ref_idc_l1_active_minus1 = %d", data->pic_parse_buffer->num_ref_idc_l1_active_minus1);
#endif
        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAParsePictureParameterBufferType,
            sizeof(VAParsePictureParameterBuffer),
            1,
            data->pic_parse_buffer,
            &pictureparameterparsingbufferID);
        CHECK_VA_STATUS("vaCreatePictureParameterParsingBuffer");

        vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            &pictureparameterparsingbufferID,
            1);
        CHECK_VA_STATUS("vaRenderPicture");

        vaStatus = vaMapBuffer(
            mVADisplay,
            sliceheaderbufferID,
            &sliceheaderbuf);
        CHECK_VA_STATUS("vaMapBuffer");

        size = *(uint32 *)((uint8 *)sliceheaderbuf + 4) + 4;
        VTRACE("slice header size = 0x%x, offset = 0x%x", size, offset);
        if (offset + size <= MAX_SLICEHEADER_BUFFER_SIZE - 4) {
            memcpy(mCachedHeader + offset, sliceheaderbuf, size);
            offset += size;
        } else {
            WTRACE("Cached slice header is not big enough!");
        }
        vaStatus = vaUnmapBuffer(
            mVADisplay,
            sliceheaderbufferID);
        CHECK_VA_STATUS("vaUnmapBuffer");
    }
    memset(mCachedHeader + offset, 0xFF, 4);
    status = updateSliceParameter(data, mCachedHeader);
    CHECK_STATUS("processSliceHeader");
    return DECODE_SUCCESS;
}

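// Feeds the hardware-parsed slice headers back into the parser so that the slice
// parameters in 'data' reflect what the hardware extracted.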
Decode_Status VideoDecoderAVCSecure::updateSliceParameter(vbp_data_h264 *data, void *sliceheaderbuf) {
    VTRACE("VideoDecoderAVCSecure::updateSliceParameter");
    Decode_Status status;
    status = VideoDecoderBase::updateBuffer(
        (uint8_t *)sliceheaderbuf,
        MAX_SLICEHEADER_BUFFER_SIZE,
        (void**)&data);
    CHECK_STATUS("updateBuffer");
    return DECODE_SUCCESS;
}

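// Renders one slice. On the first slice of a frame the picture parameter and IQ matrix
// buffers are created as well; the slice parameter buffer and finally the slice data
// buffer (pointing at the encrypted payload when the content is protected) are then
// submitted to the driver.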
Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 3 buffers to render a slice: picture parameter, IQMatrix, slice parameter
    VABufferID bufferIDs[3];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
    uint32_t slice_data_size = 0;
    uint8_t *slice_data_addr = NULL;

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
        }
        VTRACE("Current frameidx = %d", mFrameIdx++);
        // Update the reference frames and surface IDs for DPB and current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

        // Workaround: provide libva with a trimmed DPB rather than the complete DPB.
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");

        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    if (mModularMode) {
        if (mIsEncryptData) {
            sliceParam->slice_data_size = mSliceInfo[sliceIndex].sliceSize;
            slice_data_size = mSliceInfo[sliceIndex].sliceSize;
            slice_data_addr = mFrameData + mSliceInfo[sliceIndex].sliceStartOffset;
        } else {
            slice_data_size = sliceData->slice_size;
            slice_data_addr = sliceData->buffer_addr + sliceData->slice_offset;
        }
    } else {
        sliceParam->slice_data_size = mFrameSize;
        slice_data_size = mFrameSize;
        slice_data_addr = mFrameData;
    }

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    VABufferID slicebufferID;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        slice_data_size, //size
        1, //num_elements
        slice_data_addr,
        &slicebufferID);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        &slicebufferID,
        1);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;

}

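// Creates the VA config. Modular mode selects VA_DEC_SLICE_MODE_SUBSAMPLE so the
// hardware can consume subsample-encrypted slice data; otherwise the normal slice
// mode is used.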
Decode_Status VideoDecoderAVCSecure::getCodecSpecificConfigs(
    VAProfile profile, VAConfigID *config)
{
    VAStatus vaStatus;
    VAConfigAttrib attrib[2];

    if (config == NULL) {
        ETRACE("Invalid parameter!");
        return DECODE_FAIL;
    }

    attrib[0].type = VAConfigAttribRTFormat;
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].type = VAConfigAttribDecSliceMode;
    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
    if (mModularMode) {
        attrib[1].value = VA_DEC_SLICE_MODE_SUBSAMPLE;
    }

    vaStatus = vaCreateConfig(
        mVADisplay,
        profile,
        VAEntrypointVLD,
        &attrib[0],
        2,
        config);
    CHECK_VA_STATUS("vaCreateConfig");

    return DECODE_SUCCESS;
}