1 /*
2 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "VideoDecoderMPEG4.h"
18 #include "VideoDecoderTrace.h"
19 #include <string.h>
20
// Construct an MPEG-4 Part 2 (and H.263 short-header) decoder.
// Registers the VBP_MPEG4 parser with the base class and clears the
// packed-frame / N-VOP tracking state used during decoding.
VideoDecoderMPEG4::VideoDecoderMPEG4(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_MPEG4),
      mLastVOPTimeIncrement(0),    // vop_time_increment of the last reference VOP
      mExpectingNVOP(false),       // true while waiting for the N-VOP of a packed frame
      mSendIQMatrixBuf(false),     // send IQ matrix only for the first slice of a picture
      mLastVOPCodingType(MP4_VOP_TYPE_I),
      mIsShortHeader(false) {      // H.263 short-video-header mode
}
29
VideoDecoderMPEG4::~VideoDecoderMPEG4() {
    // Drop any in-flight frame and tear down VA state before the base
    // class is destroyed.
    stop();
}
33
start(VideoConfigBuffer * buffer)34 Decode_Status VideoDecoderMPEG4::start(VideoConfigBuffer *buffer) {
35 Decode_Status status;
36
37 status = VideoDecoderBase::start(buffer);
38 CHECK_STATUS("VideoDecoderBase::start");
39
40 if (buffer->data == NULL || buffer->size == 0) {
41 WTRACE("No config data to start VA.");
42 return DECODE_SUCCESS;
43 }
44
45 vbp_data_mp42 *data = NULL;
46 status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
47 CHECK_STATUS("VideoDecoderBase::parseBuffer");
48
49 status = startVA(data);
50 return status;
51 }
52
stop(void)53 void VideoDecoderMPEG4::stop(void) {
54 // drop the last frame and ignore return value
55 endDecodingFrame(true);
56 VideoDecoderBase::stop();
57
58 mLastVOPTimeIncrement = 0;
59 mExpectingNVOP = false;
60 mLastVOPCodingType = MP4_VOP_TYPE_I;
61 }
62
// Decode one input buffer. Parses the bitstream, lazily starts VA on the
// first coded data, detects coded-size changes (possibly requesting a format
// change from the client), and then decodes the contained frame(s).
Decode_Status VideoDecoderMPEG4::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_mp42 *data = NULL;
    bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    // Remember whether the container marked this buffer as a sync point;
    // used later to tolerate P/S frames with no reference (e.g. after seek).
    if (buffer->flag & IS_SYNC_FRAME) {
        mIsSyncFrame = true;
    } else {
        mIsSyncFrame = false;
    }
    buffer->ext = NULL;
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (!mVAStarted) {
        status = startVA(data);
        CHECK_STATUS("startVA");
    }

    if (mSizeChanged && !useGraphicbuffer) {
        // some container has the incorrect width/height.
        // send the format change to OMX to update the crop info.
        mSizeChanged = false;
        ITRACE("Video size is changed during startVA");
        return DECODE_FORMAT_CHANGE;
    }

    // Compare the advertised size against the parsed VOL size (only when the
    // parsed dimensions are non-zero, i.e. actually present in the stream).
    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
        mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
        data->codec_data.video_object_layer_width &&
        data->codec_data.video_object_layer_height) {
        // update encoded image size
        ITRACE("Video size is changed. from %dx%d to %dx%d\n",mVideoFormatInfo.width,mVideoFormatInfo.height,
                data->codec_data.video_object_layer_width,data->codec_data.video_object_layer_height);
        bool noNeedFlush = false;
        mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
        mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
        if (useGraphicbuffer) {
            // With native graphic buffers no flush is needed as long as the
            // new size still fits inside the allocated surfaces.
            noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
                    && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
        }
        if (!noNeedFlush) {
            flushSurfaceBuffers();
            mSizeChanged = false;
            return DECODE_FORMAT_CHANGE;
        } else {
            // Defer: flag the change so the next output carries
            // IS_RESOLUTION_CHANGE instead of forcing a port reconfigure.
            mSizeChanged = true;
        }

        setRenderRect();
    }

    status = decodeFrame(buffer, data);
    CHECK_STATUS("decodeFrame");

    return status;
}
126
flush(void)127 void VideoDecoderMPEG4::flush(void) {
128 VideoDecoderBase::flush();
129
130 mExpectingNVOP = false;
131 mLastVOPTimeIncrement = 0;
132 mLastVOPCodingType = MP4_VOP_TYPE_I;
133 }
134
// Decode the frame(s) contained in one parsed buffer. A timestamp change
// relative to the previous buffer marks a frame boundary: the pending frame
// is submitted and a new one started; an unchanged timestamp continues the
// current frame. DECODE_MULTIPLE_FRAME from begin/continue indicates a
// packed frame whose second half is reported to the caller via buffer->ext.
Decode_Status VideoDecoderMPEG4::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data) {
    Decode_Status status;
    // check if any slice is parsed, we may just receive configuration data
    if (data->number_picture_data == 0) {
        WTRACE("number_picture_data == 0");
        return DECODE_SUCCESS;
    }

    // When the MPEG4 parser returns invalid parameters, reject the buffer
    // and return an error to OMX to avoid a mediaserver crash.
    if (data->picture_data && (data->picture_data->picture_param.vop_width == 0
        || data->picture_data->picture_param.vop_height == 0)) {
        return DECODE_PARSER_FAIL;
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;

    if (lastPTS != mCurrentPTS) {
        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        // start decoding a new frame
        status = beginDecodingFrame(data);
        if (status == DECODE_MULTIPLE_FRAME) {
            // Packed frame: hand the second frame's offset/timestamp back to
            // the caller through the extension buffer.
            buffer->ext = &mExtensionBuffer;
            mExtensionBuffer.extType = PACKED_FRAME_TYPE;
            mExtensionBuffer.extSize = sizeof(mPackedFrame);
            mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
        } else if (status != DECODE_SUCCESS) {
            // Abandon the partially started frame on any hard failure.
            endDecodingFrame(true);
        }
        CHECK_STATUS("beginDecodingFrame");
    } else {
        status = continueDecodingFrame(data);
        if (status == DECODE_MULTIPLE_FRAME) {
            buffer->ext = &mExtensionBuffer;
            mExtensionBuffer.extType = PACKED_FRAME_TYPE;
            mExtensionBuffer.extSize = sizeof(mPackedFrame);
            mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
        } else if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
        }
        CHECK_STATUS("continueDecodingFrame");
    }

    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }

    return DECODE_SUCCESS;
}
190
// Begin decoding a new frame after a timestamp boundary. Performs sanity
// checks around packed frames and N-VOPs (non-coded VOPs), verifies that the
// required reference frames exist for P/S/B pictures, and then decodes via
// continueDecodingFrame(). Returns DECODE_NO_REFERENCE when references are
// missing (unless in short-header mode, which decodes best-effort).
Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) {

    Decode_Status status = DECODE_SUCCESS;
    vbp_picture_data_mp42 *picData = data->picture_data;
    VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
    int codingType = picParam->vop_fields.bits.vop_coding_type;

    // start sanity checking
    if (mExpectingNVOP) {
        // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type
        // of this frame must be B
        // for example: {PB} B N P B B P...
        if (picData->vop_coded == 1 && codingType != MP4_VOP_TYPE_B) {
            WTRACE("Invalid coding type while waiting for n-vop for packed frame.");
            mExpectingNVOP = false;
        }
    }

    // handle N-VOP picture, it could be a skipped frame or a simple placeholder of packed frame
    if (picData->vop_coded == 0) {
        if (mLastReference == NULL) {
            WTRACE("The last reference is unavailable to construct skipped frame.");
            flush();
            mExpectingNVOP = false;
            // TODO: handle this case
            return DECODE_SUCCESS;
        }

        if (mExpectingNVOP) {
            // P frame is already in queue, just need to update time stamp.
            mLastReference->renderBuffer.timeStamp = mCurrentPTS;
            mExpectingNVOP = false;
        }
        else {
            // Do nothing for a skipped frame: the last frame will be rendered
            // again natively, and no reference-frame handling is needed.
#if 0
            // this is skipped frame, use the last reference frame as output
            status = acquireSurfaceBuffer();
            CHECK_STATUS("acquireSurfaceBuffer");
            mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
            mAcquiredBuffer->renderBuffer.flag = 0;
            mAcquiredBuffer->renderBuffer.scanFormat = mLastReference->renderBuffer.scanFormat;
            mAcquiredBuffer->renderBuffer.surface = mLastReference->renderBuffer.surface;
            // No need to update mappedData for HW decoding
            //mAcquiredBuffer->mappedData.data = mLastReference->mappedData.data;
            mAcquiredBuffer->referenceFrame = true;
            status = outputSurfaceBuffer();
            CHECK_STATUS("outputSurfaceBuffer");
#endif
        }

        if (data->number_picture_data > 1) {
            WTRACE("Unexpected to have more picture data following a non-coded VOP.");
            // picture data is thrown away. No issue if picture data is for N-VOP;
            // if picture data is for a coded picture, a frame is lost.
            // TODO: handle this case
            // return DECODE_FAIL;
        }
        return DECODE_SUCCESS;
    }
    else {
        // Check if we have reference frame(s) for decoding
        if (codingType == MP4_VOP_TYPE_B) {
            if (mForwardReference == NULL ||
                mLastReference == NULL) {
                if (mIsShortHeader) {
                    status = DECODE_SUCCESS;
                    VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
                } else
                    return DECODE_NO_REFERENCE;
            }
        } else if (codingType == MP4_VOP_TYPE_P || codingType == MP4_VOP_TYPE_S) {
            // A sync frame (e.g. first frame after seek) is allowed to decode
            // without a reference.
            if (mLastReference == NULL && mIsSyncFrame == false) {
                if (mIsShortHeader) {
                    status = DECODE_SUCCESS;
                    VTRACE("%s: No reference frame but keep decoding", __FUNCTION__);
                } else
                    return DECODE_NO_REFERENCE;
            }
        }
        // all sanity checks pass, continue decoding through continueDecodingFrame
        status = continueDecodingFrame(data);
    }
    return status;
}
277
// Decode all picture data units in the parsed buffer into the current frame.
// Handles packed frames ({P,B} in one buffer): when a new picture starts
// while another is being decoded, the pending one is submitted, timestamps
// are re-derived from vop_time_increment, and (with graphic buffers) the
// caller is told to resubmit the remainder via DECODE_MULTIPLE_FRAME.
Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
    Decode_Status status = DECODE_SUCCESS;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;

    /*
         Packed Frame Assumption:

         1. In one packed frame, there's only one P or I frame and only one B frame.
         2. In packed frame, there's no skipped frame (vop_coded = 0)
         3. For one packed frame, there will be one N-VOP frame to follow the packed frame (may not immediately).
         4. N-VOP frame is the frame with vop_coded = 0.
         5. The timestamp of N-VOP frame will be used for P or I frame in the packed frame


         I, P, {P, B}, B, N, P, N, I, ...
         I, P, {P, B}, N, P, N, I, ...

         The first N is placeholder for P frame in the packed frame
         The second N is a skipped frame
    */

    vbp_picture_data_mp42 *picData = data->picture_data;
    for (uint32_t i = 0; i < data->number_picture_data; i++, picData = picData->next_picture_data) {
        // each slice has its own picture data, video_packet_header following resync_marker may reset picture header, see MP4 spec
        VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
        int codingType = picParam->vop_fields.bits.vop_coding_type;
        if (codingType == MP4_VOP_TYPE_S && picParam->no_of_sprite_warping_points > 1) {
            WTRACE("Hardware only supports up to one warping point (stationary or translation)");
        }

        if (picData->vop_coded == 0) {
            // N-VOPs are consumed in beginDecodingFrame(); seeing one here
            // violates packed-frame assumption #2 above.
            ETRACE("Unexpected to have non-coded VOP.");
            return DECODE_FAIL;
        }
        if (picData->new_picture_flag == 1 || mDecodingFrame == false) {
            // either condition indicates start of a new frame
            if (picData->new_picture_flag == 0) {
                WTRACE("First slice of picture is lost!");
                // TODO: handle this case
            }
            if (mDecodingFrame) {
                if (codingType == MP4_VOP_TYPE_B){
                    // this indicates the start of a new frame in the packed frame
                    // Update timestamp for P frame in the packed frame as timestamp here is for the B frame!
                    if (picParam->vop_time_increment_resolution){
                        // Difference of time increments modulo the resolution,
                        // biased by +resolution to keep the value positive.
                        uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
                                picParam->vop_time_increment_resolution;
                        increment = increment % picParam->vop_time_increment_resolution;
                        // convert to micro-second
                        // TODO: unit of time stamp varies on different frame work
                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
                        mAcquiredBuffer->renderBuffer.timeStamp += increment;
                        if (useGraphicBuffer){
                            mPackedFrame.timestamp = mCurrentPTS;
                            mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp;
                        }
                    }
                } else {
                    // this indicates the start of a new frame in the packed frame; no B frame in the packet.
                    // Update the timestamp according to the increment.
                    if (picParam->vop_time_increment_resolution){
                        int64_t increment = picData->vop_time_increment - mLastVOPTimeIncrement + picParam->vop_time_increment_resolution;
                        increment = increment % picParam->vop_time_increment_resolution;
                        //convert to micro-second
                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS + increment;
                        }
                        else {
                            mCurrentPTS += increment;
                        }

                    } else {
                        // No time-increment resolution available: fall back to
                        // a nominal 30 ms (30000 us) frame interval.
                        if (useGraphicBuffer) {
                            mPackedFrame.timestamp = mCurrentPTS + 30000;
                        }
                        else {
                            mCurrentPTS += 30000;
                        }
                    }
                }
                // Submit the frame decoded so far before starting the new one.
                endDecodingFrame(false);
                mExpectingNVOP = true;
                if (codingType != MP4_VOP_TYPE_B) {
                    mExpectingNVOP = false;
                }
                if (useGraphicBuffer) {
                    // Report the byte offset of the second frame so OMX can
                    // resubmit the remainder of the packed buffer.
                    int32_t count = i - 1;
                    if (count < 0) {
                        WTRACE("Shuld not be here!");
                        return DECODE_SUCCESS;
                    }
                    vbp_picture_data_mp42 *lastpic = data->picture_data;
                    for(int k = 0; k < count; k++ ) {
                        lastpic = lastpic->next_picture_data;
                    }
                    mPackedFrame.offSet = lastpic->slice_data.slice_offset + lastpic->slice_data.slice_size;
                    VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",mPackedFrame.offSet,mPackedFrame.timestamp);
                    return DECODE_MULTIPLE_FRAME;
                }
            }

            // acquire a new surface buffer
            status = acquireSurfaceBuffer();
            CHECK_STATUS("acquireSurfaceBuffer");

            // sprite is treated as P frame in the display order, so only B frame is not used as "reference"
            mAcquiredBuffer->referenceFrame = (codingType != MP4_VOP_TYPE_B);
            if (picData->picture_param.vol_fields.bits.interlaced) {
                // only MPEG-4 studio profile can have field coding. All other profiles
                // use frame coding only, i.e, there is no field VOP. (see vop_structure in MP4 spec)
                mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
            } else {
                mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
            }
            // TODO: set discontinuity flag
            mAcquiredBuffer->renderBuffer.flag = 0;
            mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
            if (mSizeChanged) {
                mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
                mSizeChanged = false;
            }
            if (codingType != MP4_VOP_TYPE_B) {
                mLastVOPCodingType = codingType;
                mLastVOPTimeIncrement = picData->vop_time_increment;
            }

            // start decoding a frame
            vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
            CHECK_VA_STATUS("vaBeginPicture");

            mDecodingFrame = true;
            mSendIQMatrixBuf = true;
        }

        status = decodeSlice(data, picData);
        CHECK_STATUS("decodeSlice");
    }

    return DECODE_SUCCESS;
}
420
421
// Submit one slice to the VA driver: picture parameters, optional IQ matrix
// (first slice of a picture only), slice parameters, and slice data are
// created as VA buffers and rendered into the current picture.
Decode_Status VideoDecoderMPEG4::decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
    vbp_slice_data_mp42 *sliceData = &(picData->slice_data);
    VASliceParameterBufferMPEG4 *sliceParam = &(sliceData->slice_param);

    // send picture parameter for each slice; the parser may reset the picture
    // header on a video_packet_header following a resync marker.
    status = setReference(picParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferMPEG4),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");

    bufferIDCount++;
    if (picParam->vol_fields.bits.quant_type && mSendIQMatrixBuf)
    {
        // only send IQ matrix for the first slice in the picture
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VAIQMatrixBufferType,
                sizeof(VAIQMatrixBufferMPEG4),
                1,
                &(data->iq_matrix_buffer),
                &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");

        mSendIQMatrixBuf = false;
        bufferIDCount++;
    }

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceParameterBufferType,
            sizeof(VASliceParameterBufferMPEG4),
            1,
            sliceParam,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");

    bufferIDCount++;

    //slice data buffer pointer
    //Note that this is the original data buffer ptr;
    // offset to the actual slice data is provided in
    // slice_data_offset in VASliceParameterBufferMP42

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceDataBufferType,
            sliceData->slice_size, //size
            1,        //num_elements
            sliceData->buffer_addr + sliceData->slice_offset,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");

    bufferIDCount++;

    vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            bufferIDs,
            bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");


    return DECODE_SUCCESS;
}
504
setReference(VAPictureParameterBufferMPEG4 * picParam)505 Decode_Status VideoDecoderMPEG4::setReference(VAPictureParameterBufferMPEG4 *picParam) {
506 switch (picParam->vop_fields.bits.vop_coding_type) {
507 case MP4_VOP_TYPE_I:
508 picParam->forward_reference_picture = VA_INVALID_SURFACE;
509 picParam->backward_reference_picture = VA_INVALID_SURFACE;
510 break;
511 case MP4_VOP_TYPE_P:
512 if (mLastReference == NULL && mIsSyncFrame == false && !mIsShortHeader) {
513 return DECODE_NO_REFERENCE;
514 }
515 if (mLastReference != NULL) {
516 picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
517 } else {
518 VTRACE("%s: no reference frame, but keep decoding", __FUNCTION__);
519 picParam->forward_reference_picture = VA_INVALID_SURFACE;
520 }
521 picParam->backward_reference_picture = VA_INVALID_SURFACE;
522 break;
523 case MP4_VOP_TYPE_B:
524 picParam->vop_fields.bits.backward_reference_vop_coding_type = mLastVOPCodingType;
525 // WEIRD, CHECK AGAIN !!!!!!!
526 if (mIsShortHeader) {
527 if (mLastReference != NULL) {
528 picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
529 } else {
530 VTRACE("%s: no forward reference frame, but keep decoding", __FUNCTION__);
531 picParam->forward_reference_picture = VA_INVALID_SURFACE;
532 }
533 if (mForwardReference != NULL) {
534 picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
535 } else {
536 VTRACE("%s: no backward reference frame, but keep decoding", __FUNCTION__);
537 picParam->backward_reference_picture = VA_INVALID_SURFACE;
538 }
539 } else if (mLastReference == NULL || mForwardReference == NULL) {
540 return DECODE_NO_REFERENCE;
541 } else {
542 picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
543 picParam->backward_reference_picture = mForwardReference->renderBuffer.surface;
544 }
545 break;
546 case MP4_VOP_TYPE_S:
547 // WEIRD, CHECK AGAIN!!!! WAS using mForwardReference
548 if (mLastReference == NULL) {
549 return DECODE_NO_REFERENCE;
550 }
551 picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
552 picParam->backward_reference_picture = VA_INVALID_SURFACE;
553 break;
554
555 default:
556 // Will never reach here;
557 return DECODE_PARSER_FAIL;
558 }
559 return DECODE_SUCCESS;
560 }
561
startVA(vbp_data_mp42 * data)562 Decode_Status VideoDecoderMPEG4::startVA(vbp_data_mp42 *data) {
563 updateFormatInfo(data);
564
565 VAProfile vaProfile;
566
567 if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
568 vaProfile = VAProfileMPEG4AdvancedSimple;
569 } else {
570 vaProfile = VAProfileMPEG4Simple;
571 }
572
573 mIsShortHeader = data->codec_data.short_video_header;
574
575 return VideoDecoderBase::setupVA(MP4_SURFACE_NUMBER, vaProfile);
576 }
577
updateFormatInfo(vbp_data_mp42 * data)578 void VideoDecoderMPEG4::updateFormatInfo(vbp_data_mp42 *data) {
579 ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
580 mVideoFormatInfo.width, mVideoFormatInfo.height,
581 data->codec_data.video_object_layer_width,
582 data->codec_data.video_object_layer_height);
583
584 mVideoFormatInfo.cropBottom = data->codec_data.video_object_layer_height > mVideoFormatInfo.height ?
585 data->codec_data.video_object_layer_height - mVideoFormatInfo.height : 0;
586 mVideoFormatInfo.cropRight = data->codec_data.video_object_layer_width > mVideoFormatInfo.width ?
587 data->codec_data.video_object_layer_width - mVideoFormatInfo.width : 0;
588
589 if ((mVideoFormatInfo.width != (uint32_t)data->codec_data.video_object_layer_width ||
590 mVideoFormatInfo.height != (uint32_t)data->codec_data.video_object_layer_height) &&
591 data->codec_data.video_object_layer_width &&
592 data->codec_data.video_object_layer_height) {
593 // update encoded image size
594 mVideoFormatInfo.width = data->codec_data.video_object_layer_width;
595 mVideoFormatInfo.height = data->codec_data.video_object_layer_height;
596 mSizeChanged = true;
597 ITRACE("Video size is changed.");
598 }
599
600 // video_range has default value of 0. Y ranges from 16 to 235.
601 mVideoFormatInfo.videoRange = data->codec_data.video_range;
602
603 switch (data->codec_data.matrix_coefficients) {
604 case 1:
605 mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
606 break;
607
608 // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
609 // SMPTE 170M/BT601
610 case 5:
611 case 6:
612 mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
613 break;
614
615 default:
616 // unknown color matrix, set to 0 so color space flag will not be set.
617 mVideoFormatInfo.colorMatrix = 0;
618 break;
619 }
620
621 mVideoFormatInfo.aspectX = data->codec_data.par_width;
622 mVideoFormatInfo.aspectY = data->codec_data.par_height;
623 //mVideoFormatInfo.bitrate = data->codec_data.bit_rate;
624 mVideoFormatInfo.valid = true;
625
626 setRenderRect();
627 }
628
checkHardwareCapability()629 Decode_Status VideoDecoderMPEG4::checkHardwareCapability() {
630 VAStatus vaStatus;
631 VAConfigAttrib cfgAttribs[2];
632 cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
633 cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
634 vaStatus = vaGetConfigAttributes(mVADisplay,
635 mIsShortHeader ? VAProfileH263Baseline : VAProfileMPEG4AdvancedSimple,
636 VAEntrypointVLD, cfgAttribs, 2);
637 CHECK_VA_STATUS("vaGetConfigAttributes");
638 if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
639 ETRACE("hardware supports resolution %d * %d smaller than the clip resolution %d * %d",
640 cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
641 return DECODE_DRIVER_FAIL;
642 }
643
644 return DECODE_SUCCESS;
645 }
646