/*
 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VideoDecoderWMV.h"
#include "VideoDecoderTrace.h"
#include <string.h>

VideoDecoderWMV::VideoDecoderWMV(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_VC1),
      mBufferIDs(NULL),
      mNumBufferIDs(0),
      mConfigDataParsed(false),
      mRangeMapped(false),
      mDeblockedCurrPicIndex(0),
      mDeblockedLastPicIndex(1),
      mDeblockedForwardPicIndex(2) {
}


VideoDecoderWMV::~VideoDecoderWMV() {
    stop();
}

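// Start the base decoder; if codec configuration data is supplied, parse it and
// bring up the VA pipeline immediately. Without config data, VA setup is
// deferred until the first decode() call.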
Decode_Status VideoDecoderWMV::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        return DECODE_SUCCESS;
    }

    vbp_data_vc1 *data = NULL;
    status = parseBuffer(buffer->data, buffer->size, &data);
    CHECK_STATUS("parseBuffer");

    status = startVA(data);
    return status;
}

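// Release the VA buffer ID array and reset parser and range-map state before
// stopping the base decoder.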
void VideoDecoderWMV::stop(void) {
    if (mBufferIDs) {
        delete [] mBufferIDs;
        mBufferIDs = NULL;
    }
    mNumBufferIDs = 0;
    mConfigDataParsed = false;
    mRangeMapped = false;

    mDeblockedCurrPicIndex = 0;
    mDeblockedLastPicIndex = 1;
    mDeblockedForwardPicIndex = 2;

    VideoDecoderBase::stop();
}

void VideoDecoderWMV::flush(void) {
    VideoDecoderBase::flush();

    mRangeMapped = false;
    mDeblockedCurrPicIndex = 0;
    mDeblockedLastPicIndex = 1;
    mDeblockedForwardPicIndex = 2;
}

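// Decode one input buffer: parse it, start VA on first use, handle coded-size
// changes (reporting DECODE_FORMAT_CHANGE when surfaces must be reallocated),
// then decode the parsed frame.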
Decode_Status VideoDecoderWMV::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_vc1 *data = NULL;
    bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }

    status = parseBuffer(buffer->data, buffer->size, &data);
    CHECK_STATUS("parseBuffer");

    if (!mVAStarted) {
        status = startVA(data);
        CHECK_STATUS("startVA");
    }

    if (mSizeChanged && !useGraphicbuffer) {
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    }

    if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH ||
        mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) &&
        data->se_data->CODED_WIDTH &&
        data->se_data->CODED_HEIGHT) {
        ITRACE("video size is changed from %dx%d to %dx%d", mVideoFormatInfo.width, mVideoFormatInfo.height,
                data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);
        mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
        mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
        bool noNeedFlush = false;
        if (useGraphicbuffer) {
            noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
                    && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
        }

        setRenderRect();

        if (noNeedFlush) {
            mSizeChanged = true;
        } else {
            flushSurfaceBuffers();
            mSizeChanged = false;
            return DECODE_FORMAT_CHANGE;
        }
    }

    status = decodeFrame(buffer, data);
    CHECK_STATUS("decodeFrame");
    return status;
}

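// Decode the picture(s) parsed from one sample buffer. Skipped frames are dropped
// (the last frame is rendered again natively); otherwise a surface is acquired,
// flags, scan format and reference status are set, each field/frame picture is
// submitted to the driver, and the surface is output.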
Decode_Status VideoDecoderWMV::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vc1 *data) {
    Decode_Status status;
    mCurrentPTS = buffer->timeStamp;
    if (0 == data->num_pictures || NULL == data->pic_data) {
        WTRACE("Number of pictures is 0, buffer contains configuration data only?");
        return DECODE_SUCCESS;
    }

    if (data->pic_data[0].picture_is_skipped == VC1_PTYPE_SKIPPED) {
        // Do nothing for a skipped frame: the last frame will be rendered again natively.
        // No need to handle the reference frame either.
        return DECODE_SUCCESS;
#if 0
        //use the last P or I frame surface for skipped frame and treat it as P frame
        if (mLastReference == NULL) {
            // TODO: handle this case
            WTRACE("The last reference is unavailable to construct skipped frame.");
            return DECODE_SUCCESS;
        }

        status = acquireSurfaceBuffer();
        CHECK_STATUS("acquireSurfaceBuffer");
        mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
        mAcquiredBuffer->renderBuffer.flag = 0;
        mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
        mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
        // No need to update mappedData for HW decoding
        //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
        mAcquiredBuffer->referenceFrame = true;
        // let outputSurfaceBuffer handle "asReference" for VC1
        status = outputSurfaceBuffer();
        return status;
#endif
    }

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");

    mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp;
    if (buffer->flag & HAS_DISCONTINUITY) {
        mAcquiredBuffer->renderBuffer.flag |= HAS_DISCONTINUITY;
    }
    if (buffer->flag & WANT_DECODE_ONLY) {
        mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY;
    }
    if (mSizeChanged) {
        mSizeChanged = false;
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
    }

    if (data->num_pictures > 1) {
        if (data->pic_data[0].pic_parms->picture_fields.bits.is_first_field) {
            mAcquiredBuffer->renderBuffer.scanFormat = VA_TOP_FIELD;
        } else {
            mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD;
        }
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    mRangeMapped = (data->se_data->RANGE_MAPY_FLAG || data->se_data->RANGE_MAPUV_FLAG || data->se_data->RANGERED);

    int frameType = data->pic_data[0].pic_parms->picture_fields.bits.picture_type;
    mAcquiredBuffer->referenceFrame = (frameType == VC1_PTYPE_I || frameType == VC1_PTYPE_P);

    // TODO: handle multiple frames parsed from a sample buffer
    int numPictures = (data->num_pictures > 1) ? 2 : 1;

    for (int index = 0; index < numPictures; index++) {
        status = decodePicture(data, index);
        if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
            return status;
        }
    }

    if (mRangeMapped) {
        updateDeblockedPicIndexes(frameType);
    }

    // let outputSurfaceBuffer handle "asReference" for VC1
    status = outputSurfaceBuffer();
    return status;
}

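// Submit a single picture to the VA driver: create the picture parameter,
// bit-plane, slice parameter and slice data buffers, then render and end the picture.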
Decode_Status VideoDecoderWMV::decodePicture(vbp_data_vc1 *data, int32_t picIndex) {
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    Decode_Status status;
    int32_t bufferIDCount = 0;
    vbp_picture_data_vc1 *picData = &(data->pic_data[picIndex]);
    VAPictureParameterBufferVC1 *picParams = picData->pic_parms;

    if (picParams == NULL) {
        return DECODE_PARSER_FAIL;
    }

    status = allocateVABufferIDs(picData->num_slices * 2 + 2);
    CHECK_STATUS("allocateVABufferIDs");

    status = setReference(picParams, picIndex, mAcquiredBuffer->renderBuffer.surface);
    CHECK_STATUS("setReference");

    if (mRangeMapped) {
        // keep the destination surface for the picture after decoding and in-loop filtering
        picParams->inloop_decoded_picture = mExtraSurfaces[mDeblockedCurrPicIndex];
    } else {
        picParams->inloop_decoded_picture = VA_INVALID_SURFACE;
    }

    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    CHECK_VA_STATUS("vaBeginPicture");
    // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding.
    mDecodingFrame = true;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferVC1),
            1,
            picParams,
            &mBufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
    bufferIDCount++;

    if (picParams->bitplane_present.value) {
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VABitPlaneBufferType,
                picData->size_bitplanes,
                1,
                picData->packed_bitplanes,
                &mBufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateBitPlaneBuffer");
        bufferIDCount++;
    }

    for (uint32_t i = 0; i < picData->num_slices; i++) {
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceParameterBufferType,
                sizeof(VASliceParameterBufferVC1),
                1,
                &(picData->slc_data[i].slc_parms),
                &mBufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceDataBufferType,
                //size
                picData->slc_data[i].slice_size,
                //num_elements
                1,
                //slice data buffer pointer
                //Note that this is the original data buffer ptr;
                // offset to the actual slice data is provided in
                // slice_data_offset in VASliceParameterBufferVC1
                picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset,
                &mBufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceDataBuffer");
        bufferIDCount++;
    }

    vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            mBufferIDs,
            bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    vaStatus = vaEndPicture(mVADisplay, mVAContext);
    mDecodingFrame = false;
    CHECK_VA_STATUS("vaEndPicture");

    return DECODE_SUCCESS;
}

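// Select the forward/backward reference surfaces for the current picture
// according to its type (I/P/B/BI) and the reference field indicators used
// for interlaced field coding.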
Decode_Status VideoDecoderWMV::setReference(
        VAPictureParameterBufferVC1 *params,
        int32_t picIndex,
        VASurfaceID current) {
    int frameType = params->picture_fields.bits.picture_type;
    switch (frameType) {
        case VC1_PTYPE_I:
            params->forward_reference_picture = current;
            params->backward_reference_picture = current;
            break;
        case VC1_PTYPE_P:
            // check REFDIST in the picture parameter buffer
            if (0 != params->reference_fields.bits.reference_distance_flag &&
                0 != params->reference_fields.bits.reference_distance) {
                /* A previously decoded frame (distance up to 16, but not 0) is used
                   for reference. Not supported here.
                */
                return DECODE_NO_REFERENCE;
            }
            if (1 == picIndex) {
                // handle interlace field coding case
                if (1 == params->reference_fields.bits.num_reference_pictures ||
                    1 == params->reference_fields.bits.reference_field_pic_indicator) {
                    /*
                       two reference fields or the second closest I/P field is used for
                       prediction. Set forward reference picture to INVALID so it will be
                       updated to a valid previous reconstructed reference frame later.
                    */
                    params->forward_reference_picture = VA_INVALID_SURFACE;
                } else {
                    /* the closest I/P is used for reference so it must be the
                       complementary field in the same surface.
                    */
                    params->forward_reference_picture = current;
                }
            }
            if (VA_INVALID_SURFACE == params->forward_reference_picture) {
                if (mLastReference == NULL) {
                    return DECODE_NO_REFERENCE;
                }
                params->forward_reference_picture = mLastReference->renderBuffer.surface;
            }
            params->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case VC1_PTYPE_B:
            if (mForwardReference == NULL || mLastReference == NULL) {
                return DECODE_NO_REFERENCE;
            }
            params->forward_reference_picture = mForwardReference->renderBuffer.surface;
            params->backward_reference_picture = mLastReference->renderBuffer.surface;
            break;
        case VC1_PTYPE_BI:
            params->forward_reference_picture = VA_INVALID_SURFACE;
            params->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case VC1_PTYPE_SKIPPED:
            // Will never happen here
            break;
        default:
            break;
    }
    return DECODE_SUCCESS;
}

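// Rotate the extra-surface indexes used for range-mapped (out-of-loop) output so
// the current, last and forward deblocked pictures track the reference frames.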
void VideoDecoderWMV::updateDeblockedPicIndexes(int frameType) {
    int32_t curPicIndex = mDeblockedCurrPicIndex;

    /* Out Loop (range map) buffers */
    if (frameType != VC1_PTYPE_SKIPPED) {
        if ((frameType == VC1_PTYPE_I) || (frameType == VC1_PTYPE_P)) {
            mDeblockedCurrPicIndex = mDeblockedLastPicIndex;
            mDeblockedLastPicIndex = curPicIndex;
        } else {
            mDeblockedCurrPicIndex = mDeblockedForwardPicIndex;
            mDeblockedForwardPicIndex = curPicIndex;
        }
    }
}

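// Codec data that already contains start codes is a VC-1 (Advanced Profile)
// stream and is passed through unchanged; otherwise the WMV codec data is
// wrapped with a synthesized sequence start code and the coded width/height
// so the parser can consume it as an RCV-style sequence header.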
Decode_Status VideoDecoderWMV::updateConfigData(
        uint8_t *configData,
        int32_t configDataLen,
        uint8_t **newConfigData,
        int32_t *newConfigDataLen) {
    int32_t i = 0;
    uint8_t *p = configData;

    /* Check for start codes. If one exists, then this is VC-1 and not WMV. */
    while (i < configDataLen - 2) {
        if ((p[i] == 0) &&
            (p[i + 1] == 0) &&
            (p[i + 2] == 1)) {
            *newConfigData = NULL;
            *newConfigDataLen = 0;
            return DECODE_SUCCESS;
        }
        i++;
    }

    *newConfigDataLen = configDataLen + 9;
    p = *newConfigData = new uint8_t [*newConfigDataLen];
    if (!p) {
        return DECODE_MEMORY_FAIL;
    }

    /* If we get here we have 4+ bytes of codec data that must be formatted */
    /* to pass through as an RCV sequence header. */
    p[0] = 0;
    p[1] = 0;
    p[2] = 1;
    p[3] = 0x0f; /* Start code. */
    p[4] = (mVideoFormatInfo.width >> 8) & 0x0ff;
    p[5] = mVideoFormatInfo.width & 0x0ff;
    p[6] = (mVideoFormatInfo.height >> 8) & 0x0ff;
    p[7] = mVideoFormatInfo.height & 0x0ff;

    memcpy(p + 8, configData, configDataLen);
    *(p + configDataLen + 8) = 0x80;

    return DECODE_SUCCESS;
}

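// Update the format info from the parsed sequence data, map the parsed PROFILE
// value (0 = Simple, 1 = Main, otherwise Advanced) to a VAProfile, and set up VA
// with the required decode and extra surfaces.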
Decode_Status VideoDecoderWMV::startVA(vbp_data_vc1 *data) {
    updateFormatInfo(data);

    VAProfile vaProfile;
    switch (data->se_data->PROFILE) {
        case 0:
            vaProfile = VAProfileVC1Simple;
            break;
        case 1:
            vaProfile = VAProfileVC1Main;
            break;
        default:
            vaProfile = VAProfileVC1Advanced;
            break;
    }

    return VideoDecoderBase::setupVA(VC1_SURFACE_NUMBER, vaProfile, VC1_EXTRA_SURFACE_NUMBER);
}

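// Refresh width/height, cropping, color matrix and aspect ratio from the parsed
// sequence data, and flag a size change when the coded dimensions differ.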
void VideoDecoderWMV::updateFormatInfo(vbp_data_vc1 *data) {
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
            mVideoFormatInfo.width, mVideoFormatInfo.height,
            data->se_data->CODED_WIDTH, data->se_data->CODED_HEIGHT);

    mVideoFormatInfo.cropBottom = data->se_data->CODED_HEIGHT > mVideoFormatInfo.height ?
            data->se_data->CODED_HEIGHT - mVideoFormatInfo.height : 0;
    mVideoFormatInfo.cropRight = data->se_data->CODED_WIDTH > mVideoFormatInfo.width ?
            data->se_data->CODED_WIDTH - mVideoFormatInfo.width : 0;

    if ((mVideoFormatInfo.width != data->se_data->CODED_WIDTH ||
        mVideoFormatInfo.height != data->se_data->CODED_HEIGHT) &&
        data->se_data->CODED_WIDTH &&
        data->se_data->CODED_HEIGHT) {
        // encoded image size
        mVideoFormatInfo.width = data->se_data->CODED_WIDTH;
        mVideoFormatInfo.height = data->se_data->CODED_HEIGHT;
        mSizeChanged = true;
        ITRACE("Video size is changed.");
    }

    // scaling has been performed on the decoded image.
    mVideoFormatInfo.videoRange = 1;

    switch (data->se_data->MATRIX_COEF) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;
        // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996.
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;
        default:
            // unknown color matrix, set to 0 so color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }

    mVideoFormatInfo.aspectX = data->se_data->ASPECT_HORIZ_SIZE;
    mVideoFormatInfo.aspectY = data->se_data->ASPECT_VERT_SIZE;
    mVideoFormatInfo.bitrate = 0; //data->se_data->bitrate;
    mVideoFormatInfo.valid = true;

    setRenderRect();
}

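// Make sure mBufferIDs can hold the requested number of VA buffer IDs,
// reallocating the array when it is too small.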
Decode_Status VideoDecoderWMV::allocateVABufferIDs(int32_t number) {
    if (mNumBufferIDs > number) {
        return DECODE_SUCCESS;
    }
    if (mBufferIDs) {
        delete [] mBufferIDs;
    }
    mBufferIDs = NULL;
    mNumBufferIDs = 0;
    mBufferIDs = new VABufferID [number];
    if (mBufferIDs == NULL) {
        return DECODE_MEMORY_FAIL;
    }
    mNumBufferIDs = number;
    return DECODE_SUCCESS;
}

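// Parse an input buffer with the VBP parser. The first buffer is treated as codec
// configuration data and is first run through updateConfigData(); later buffers
// are parsed directly.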
Decode_Status VideoDecoderWMV::parseBuffer(uint8_t *data, int32_t size, vbp_data_vc1 **vbpData) {
    Decode_Status status;

    if (data == NULL || size == 0) {
        return DECODE_INVALID_DATA;
    }

    if (mConfigDataParsed) {
        status = VideoDecoderBase::parseBuffer(data, size, false, (void**)vbpData);
        CHECK_STATUS("VideoDecoderBase::parseBuffer");
    } else {
        uint8_t *newData = NULL;
        int32_t newSize = 0;
        status = updateConfigData(data, size, &newData, &newSize);
        CHECK_STATUS("updateConfigData");

        if (newSize) {
            status = VideoDecoderBase::parseBuffer(newData, newSize, true, (void**)vbpData);
            delete [] newData;
        } else {
            status = VideoDecoderBase::parseBuffer(data, size, true, (void**)vbpData);
        }
        CHECK_STATUS("VideoDecoderBase::parseBuffer");
        mConfigDataParsed = true;
    }
    return DECODE_SUCCESS;
}

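// Verify that the maximum picture size reported by the driver can accommodate
// the clip resolution; the check is compiled out when USE_GEN_HW is defined.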
Decode_Status VideoDecoderWMV::checkHardwareCapability() {
#ifndef USE_GEN_HW
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileVC1Advanced,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("hardware supported resolution %d x %d is smaller than the clip resolution %d x %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
#endif
    return DECODE_SUCCESS;
}