/*
* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "VideoDecoderMPEG4.h"
#include "VideoDecoderTrace.h"
#include <string.h>

#define MAX_PICTURE_WIDTH_MPEG4 1920
#define MAX_PICTURE_HEIGHT_MPEG4 1088

VideoDecoderMPEG4::VideoDecoderMPEG4(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_MPEG4),
      mLastVOPTimeIncrement(0),
      mExpectingNVOP(false),
      mSendIQMatrixBuf(false),
      mLastVOPCodingType(MP4_VOP_TYPE_I),
      mIsShortHeader(false) {
}

VideoDecoderMPEG4::~VideoDecoderMPEG4() {
    stop();
}

Decode_Status VideoDecoderMPEG4::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        return DECODE_SUCCESS;
    }

    vbp_data_mp42 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (data->codec_data.video_object_layer_width > MAX_PICTURE_WIDTH_MPEG4 ||
        data->codec_data.video_object_layer_height > MAX_PICTURE_HEIGHT_MPEG4) {
        return DECODE_INVALID_DATA;
    }

    status = startVA(data);
    return status;
}

void VideoDecoderMPEG4::stop(void) {
    // drop the last frame and ignore return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();

    mLastVOPTimeIncrement = 0;
    mExpectingNVOP = false;
    mLastVOPCodingType = MP4_VOP_TYPE_I;
}

Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_mp42 *data = NULL;
    bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    if (buffer->flag & IS_SYNC_FRAME) {
        mIsSyncFrame = true;
    } else {
        mIsSyncFrame = false;
    }
    buffer->ext = NULL;
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (data->codec_data.video_object_layer_width > MAX_PICTURE_WIDTH_MPEG4 ||
        data->codec_data.video_object_layer_height > MAX_PICTURE_HEIGHT_MPEG4) {
        return DECODE_INVALID_DATA;
    }

    if (!mVAStarted) {
        status = startVA(data);
        CHECK_STATUS("startVA");
    }

    if (mSizeChanged && !useGraphicbuffer) {
        // Some containers carry incorrect width/height.
        // Send the format change to OMX so it can update the crop info.
        mSizeChanged = false;
        ITRACE("Video size is changed during startVA");
        return DECODE_FORMAT_CHANGE;
    }

    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
        mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
        data->codec_data.video_object_layer_width &&
        data->codec_data.video_object_layer_height) {
        // update encoded image size
        ITRACE("Video size is changed. from %dx%d to %dx%d\n", mVideoFormatInfo.width, mVideoFormatInfo.height,
                data->codec_data.video_object_layer_width, data->codec_data.video_object_layer_height);

        if (useGraphicbuffer && mStoreMetaData) {
            pthread_mutex_lock(&mFormatLock);
        }
        mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
        mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
        bool needFlush = false;
        if (useGraphicbuffer) {
            if (mStoreMetaData) {
                needFlush = true;

                mVideoFormatInfo.valid = false;
                pthread_mutex_unlock(&mFormatLock);
            } else {
                needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                        || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
            }
        }
        if (needFlush) {
            if (mStoreMetaData) {
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
            } else {
                flushSurfaceBuffers();
            }
            mSizeChanged = false;
            return DECODE_FORMAT_CHANGE;
        } else {
            mSizeChanged = true;
        }

        setRenderRect();
    } else {
        if (useGraphicbuffer && mStoreMetaData) {
            mVideoFormatInfo.valid = true;
        }
    }

    status = decodeFrame(buffer, data);
    CHECK_STATUS("decodeFrame");

    return status;
}

void VideoDecoderMPEG4::flush(void) {
    VideoDecoderBase::flush();

    mExpectingNVOP = false;
    mLastVOPTimeIncrement = 0;
    mLastVOPCodingType = MP4_VOP_TYPE_I;
}

Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data) {
    Decode_Status status;
    // check if any slice is parsed; we may have received only configuration data
    if (data->number_picture_data == 0) {
        WTRACE("number_picture_data == 0");
        return DECODE_SUCCESS;
    }
    if (data->picture_data && (data->picture_data->picture_param.vop_width == 0 || data->picture_data->picture_param.vop_height == 0)) {
        if (!data->codec_data.got_vol && data->codec_data.got_vop) {
            // error concealment when the VOL is missing
            data->picture_data->picture_param.vop_width = mVideoFormatInfo.width;
            data->picture_data->picture_param.vop_height = mVideoFormatInfo.height;
        } else {
            return DECODE_PARSER_FAIL;
        }
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;

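    // A changed timestamp marks the boundary between frames; slices of the same frame share a PTS.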
    if (lastPTS != mCurrentPTS) {
        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        // start decoding a new frame
        status = beginDecodingFrame(data);
        if (status == DECODE_MULTIPLE_FRAME) {
            buffer->ext = &mExtensionBuffer;
            mExtensionBuffer.extType = PACKED_FRAME_TYPE;
            mExtensionBuffer.extSize = sizeof(mPackedFrame);
            mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
        } else if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
        }
        CHECK_STATUS("beginDecodingFrame");
    } else {
        status = continueDecodingFrame(data);
        if (status == DECODE_MULTIPLE_FRAME) {
            buffer->ext = &mExtensionBuffer;
            mExtensionBuffer.extType = PACKED_FRAME_TYPE;
            mExtensionBuffer.extSize = sizeof(mPackedFrame);
            mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
        } else if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
        }
        CHECK_STATUS("continueDecodingFrame");
    }

    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }

    return DECODE_SUCCESS;
}


Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) {

    Decode_Status status = DECODE_SUCCESS;
    vbp_picture_data_mp42 *picData = data->picture_data;
    VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
    int codingType = picParam->vop_fields.bits.vop_coding_type;

    // start sanity checking
    if (mExpectingNVOP) {
        // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type
        // of this frame must be B
        // for example: {PB} B N P B B P...
        if (picData->vop_coded == 1 && codingType != MP4_VOP_TYPE_B) {
            WTRACE("Invalid coding type while waiting for n-vop for packed frame.");
            mExpectingNVOP = false;
        }
    }

    // handle an N-VOP picture; it could be a skipped frame or a simple placeholder of a packed frame
    if (picData->vop_coded == 0) {
        if (mLastReference == NULL) {
            WTRACE("The last reference is unavailable to construct skipped frame.");
            flush();
            mExpectingNVOP = false;
            // TODO: handle this case
            return DECODE_SUCCESS;
        }

        if (mExpectingNVOP) {
            // the P frame is already in the queue, just need to update its time stamp.
            mLastReference->renderBuffer.timeStamp = mCurrentPTS;
            mExpectingNVOP = false;
        }
        else {
            // Do nothing for a skipped frame as the last frame will be rendered again natively.
            // No need to handle the reference frame either.
#if 0
            // this is a skipped frame, use the last reference frame as output
            status = acquireSurfaceBuffer();
            CHECK_STATUS("acquireSurfaceBuffer");
            mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
            mAcquiredBuffer->renderBuffer.flag = 0;
            mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
            mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
            // No need to update mappedData for HW decoding
            //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
            mAcquiredBuffer->referenceFrame = true;
            status = outputSurfaceBuffer();
            CHECK_STATUS("outputSurfaceBuffer");
#endif
        }

        if (data->number_picture_data > 1) {
            WTRACE("Unexpected to have more picture data following a non-coded VOP.");
            // The picture data is thrown away. No issue if it is for an N-VOP;
            // if it is for a coded picture, a frame is lost.
            // TODO: handle this case
            // return DECODE_FAIL;
        }
        return DECODE_SUCCESS;
    }
    else {
        // Check if we have reference frame(s) for decoding
        if (codingType == MP4_VOP_TYPE_B) {
            if (mForwardReference == NULL ||
                mLastReference == NULL) {
                if (mIsShortHeader) {
                    status = DECODE_SUCCESS;
                    VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
                } else
                    return DECODE_NO_REFERENCE;
            }
        } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) {
            if (mLastReference == NULL && mIsSyncFrame == false) {
                if (mIsShortHeader) {
                    status = DECODE_SUCCESS;
                    VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
                } else
                    return DECODE_NO_REFERENCE;
            }
        }
        // all sanity checks pass, continue decoding through continueDecodingFrame
        status = continueDecodingFrame(data);
    }
    return status;
}

Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
    Decode_Status status = DECODE_SUCCESS;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;

    /*
        Packed Frame Assumption:

        1. In one packed frame, there's only one P or I frame and only one B frame.
        2. In packed frame, there's no skipped frame (vop_coded = 0)
        3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately).
        4. N-VOP frame is the frame with vop_coded = 0.
        5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame


        I, P, {P, B}, B, N, P, N, I, ...
        I, P, {P, B}, N, P, N, I, ...

        The first N is placeholder for P frame in the packed frame
        The second N is a skipped frame
    */

    vbp_picture_data_mp42 *picData = data->picture_data;
    for (uint32_t i = 0; i < data->number_picture_data; i++, picData = picData->next_picture_data) {
        // each slice has its own picture data, video_packet_header following resync_marker may reset picture header, see MP4 spec
        VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
        int codingType = picParam->vop_fields.bits.vop_coding_type;
        if (codingType == MP4_VOP_TYPE_S && picParam->no_of_sprite_warping_points > 1) {
            WTRACE("Hardware only supports up to one warping point (stationary or translation)");
        }

        if (picData->vop_coded == 0) {
            ETRACE("Unexpected to have non-coded VOP.");
            return DECODE_FAIL;
        }
        if (picData->new_picture_flag == 1 || mDecodingFrame == false) {
            // either condition indicates start of a new frame
            if (picData->new_picture_flag == 0) {
                WTRACE("First slice of picture is lost!");
                // TODO: handle this case
            }
            if (mDecodingFrame) {
                if (codingType == MP4_VOP_TYPE_B) {
                    // this indicates the start of a new frame in the packed frame
                    // Update timestamp for P frame in the packed frame as timestamp here is for the B frame!
                    if (picParam->vop_time_increment_resolution) {
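                        // vop_time_increment counts in units of 1/vop_time_increment_resolution seconds
                        // and wraps around; adding the resolution before the modulo keeps the delta non-negative.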
                        uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
                                picParam->vop_time_increment_resolution;
                        increment = increment % picParam->vop_time_increment_resolution;
                        // convert to micro-second
                        // TODO: the unit of the time stamp varies across frameworks
                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
                        mAcquiredBuffer->renderBuffer.timeStamp += increment;
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS;
                            mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp;
                        }
                    }
                } else {
                    // this indicates the start of a new frame in the packed frame. no B frame in the packet
                    // Update the timestamp according to the increment
                    if (picParam->vop_time_increment_resolution) {
                        int64_t increment = picData->vop_time_increment - mLastVOPTimeIncrement + picParam->vop_time_increment_resolution;
                        increment = increment % picParam->vop_time_increment_resolution;
                        // convert to micro-second
                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS + increment;
                        }
                        else {
                            mCurrentPTS += increment;
                        }

                    } else {
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS + 30000;
                        }
                        else {
                            mCurrentPTS += 30000;
                        }
                    }
                }
                endDecodingFrame(false);
                mExpectingNVOP = true;
                if (codingType != MP4_VOP_TYPE_B) {
                    mExpectingNVOP = false;
                }
                if (useGraphicBuffer) {
                    int32_t count = i - 1;
                    if (count < 0) {
                        WTRACE("Should not be here!");
                        return DECODE_SUCCESS;
                    }
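                    // Walk to the picture data of the previous picture in this buffer; the end of its
                    // slice marks where the second frame of the packed buffer begins.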
                    vbp_picture_data_mp42 *lastpic = data->picture_data;
                    for (int k = 0; k < count; k++) {
                        lastpic = lastpic->next_picture_data;
                    }
                    mPackedFrame.offSet = lastpic->slice_data.slice_offset + lastpic->slice_data.slice_size;
                    VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld", mPackedFrame.offSet, mPackedFrame.timestamp);
                    return DECODE_MULTIPLE_FRAME;
                }
            }

            // acquire a new surface buffer
            status = acquireSurfaceBuffer();
            CHECK_STATUS("acquireSurfaceBuffer");

            // a sprite is treated as a P frame in the display order, so only the B frame is not used as a "reference"
            mAcquiredBuffer->referenceFrame = (codingType != MP4_VOP_TYPE_B);
            if (picData->picture_param.vol_fields.bits.interlaced) {
                // only MPEG-4 studio profile can have field coding. All other profiles
                // use frame coding only, i.e., there is no field VOP. (see vop_structure in MP4 spec)
                mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
            } else {
                mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
            }
            // TODO: set discontinuity flag
            mAcquiredBuffer->renderBuffer.flag = 0;
            mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
            if (mSizeChanged) {
                mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
                mSizeChanged = false;
            }
            if (codingType != MP4_VOP_TYPE_B) {
                mLastVOPCodingType = codingType;
                mLastVOPTimeIncrement = picData->vop_time_increment;
            }

            // start decoding a frame
            vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
            CHECK_VA_STATUS("vaBeginPicture");

            mDecodingFrame = true;
            mSendIQMatrixBuf = true;
        }

        status = decodeSlice(data, picData);
        CHECK_STATUS("decodeSlice");
    }

    return DECODE_SUCCESS;
}


Decode_Status VideoDecoderMPEG4::decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
    vbp_slice_data_mp42 *sliceData = &(picData->slice_data);
    VASliceParameterBufferMPEG4 *sliceParam = &(sliceData->slice_param);

    // send the picture parameter for each slice
    status = setReference(picParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferMPEG4),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");

    bufferIDCount++;
    if (picParam->vol_fields.bits.quant_type && mSendIQMatrixBuf)
    {
        // only send IQ matrix for the first slice in the picture
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VAIQMatrixBufferType,
                sizeof(VAIQMatrixBufferMPEG4),
                1,
                &(data->iq_matrix_buffer),
                &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");

        mSendIQMatrixBuf = false;
        bufferIDCount++;
    }

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceParameterBufferType,
            sizeof(VASliceParameterBufferMPEG4),
            1,
            sliceParam,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");

    bufferIDCount++;

    // slice data buffer pointer
    // Note that this is the original data buffer pointer;
    // the offset to the actual slice data is provided in
    // slice_data_offset in VASliceParameterBufferMPEG4

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceDataBufferType,
            sliceData->slice_size, //size
            1,                     //num_elements
            sliceData->buffer_addr + sliceData->slice_offset,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");

    bufferIDCount++;

    vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            bufferIDs,
            bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");


    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *picParam) {
    switch (picParam->vop_fields.bits.vop_coding_type) {
        case MP4_VOP_TYPE_I:
            picParam->forward_reference_picture = VA_INVALID_SURFACE;
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case MP4_VOP_TYPE_P:
            if (mLastReference == NULL && mIsSyncFrame == false && !mIsShortHeader) {
                return DECODE_NO_REFERENCE;
            }
            if (mLastReference != NULL) {
                picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
            } else {
                VTRACE("%s: no reference frame, but keep decoding", __FUNCTION__);
                picParam->forward_reference_picture = VA_INVALID_SURFACE;
            }
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case MP4_VOP_TYPE_B:
            picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType;
            // WEIRD, CHECK AGAIN !!!!!!!
            if (mIsShortHeader) {
                if (mLastReference != NULL) {
                    picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
                } else {
                    VTRACE("%s: no forward reference frame, but keep decoding", __FUNCTION__);
                    picParam->forward_reference_picture = VA_INVALID_SURFACE;
                }
                if (mForwardReference != NULL) {
                    picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
                } else {
                    VTRACE("%s: no backward reference frame, but keep decoding", __FUNCTION__);
                    picParam->backward_reference_picture = VA_INVALID_SURFACE;
                }
            } else if (mLastReference == NULL || mForwardReference == NULL) {
                return DECODE_NO_REFERENCE;
            } else {
                picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
                picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
            }
            break;
        case MP4_VOP_TYPE_S:
            // WEIRD, CHECK AGAIN!!!! WAS using mForwardReference
            if (mLastReference == NULL) {
                return DECODE_NO_REFERENCE;
            }
            picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;

        default:
            // Will never reach here;
            return DECODE_PARSER_FAIL;
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderMPEG4::startVA(vbp_data_mp42 *data) {
    updateFormatInfo(data);

    VAProfile vaProfile;

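    // The mask check selects profile_and_level_indication values 0xF0-0xF7, which ISO/IEC 14496-2
    // assigns to Advanced Simple Profile levels; anything else falls back to the Simple profile.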
    if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
        vaProfile = VAProfileMPEG4AdvancedSimple;
    } else {
        vaProfile = VAProfileMPEG4Simple;
    }

    mIsShortHeader = data->codec_data.short_video_header;

    return VideoDecoderBase::setupVA(MP4_SURFACE_NUMBER, vaProfile);
}

void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) {
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
        mVideoFormatInfo.width, mVideoFormatInfo.height,
        data->codec_data.video_object_layer_width,
        data->codec_data.video_object_layer_height);
    // error concealment when the VOL is missing
    if (!data->codec_data.got_vol && data->codec_data.got_vop) {
        data->codec_data.video_object_layer_width = mVideoFormatInfo.width;
        data->codec_data.video_object_layer_height = mVideoFormatInfo.height;
    }

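    // If the coded VOL size is larger than the current format size, expose the difference as crop.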
    mVideoFormatInfo.cropBottom = data->codec_data.video_object_layer_height > mVideoFormatInfo.height ?
            data->codec_data.video_object_layer_height - mVideoFormatInfo.height : 0;
    mVideoFormatInfo.cropRight = data->codec_data.video_object_layer_width > mVideoFormatInfo.width ?
            data->codec_data.video_object_layer_width - mVideoFormatInfo.width : 0;

    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
        mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
        data->codec_data.video_object_layer_width &&
        data->codec_data.video_object_layer_height) {
        // update encoded image size
        mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
        mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
        mSizeChanged = true;
        ITRACE("Video size is changed.");
    }

    // video_range has default value of 0. Y ranges from 16 to 235.
    mVideoFormatInfo.videoRange = data->codec_data.video_range;

    switch (data->codec_data.matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix, set to 0 so color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }

    mVideoFormatInfo.aspectX = data->codec_data.par_width;
    mVideoFormatInfo.aspectY = data->codec_data.par_height;
    //mVideoFormatInfo.bitrate = data->codec_data.bit_rate;
    mVideoFormatInfo.valid = true;

    setRenderRect();
    setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
}

Decode_Status VideoDecoderMPEG4::checkHardwareCapability() {
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay,
            mIsShortHeader ? VAProfileH263Baseline : VAProfileMPEG4AdvancedSimple,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
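    // Compare the maximum supported area (max width x max height) against the clip's pixel area,
    // rather than checking each dimension independently.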
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }

    return DECODE_SUCCESS;
}
