/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */


#include "vpx_config.h"
#include "vp8_rtcd.h"
#include "./vpx_scale_rtcd.h"
#include "onyxd_int.h"
#include "vp8/common/header.h"
#include "vp8/common/reconintra4x4.h"
#include "vp8/common/reconinter.h"
#include "detokenize.h"
#include "vp8/common/invtrans.h"
#include "vp8/common/alloccommon.h"
#include "vp8/common/entropymode.h"
#include "vp8/common/quant_common.h"
#include "vpx_scale/vpx_scale.h"
#include "vp8/common/setupintrarecon.h"

#include "decodemv.h"
#include "vp8/common/extend.h"
#if CONFIG_ERROR_CONCEALMENT
#include "error_concealment.h"
#endif
#include "vpx_mem/vpx_mem.h"
#include "vp8/common/threading.h"
#include "decoderthreading.h"
#include "dboolhuff.h"

#include <assert.h>
#include <stdio.h>

void vp8cx_init_de_quantizer(VP8D_COMP *pbi)
{
    int Q;
    VP8_COMMON *const pc = & pbi->common;

    for (Q = 0; Q < QINDEX_RANGE; Q++)
    {
        pc->Y1dequant[Q][0] = (short)vp8_dc_quant(Q, pc->y1dc_delta_q);
        pc->Y2dequant[Q][0] = (short)vp8_dc2quant(Q, pc->y2dc_delta_q);
        pc->UVdequant[Q][0] = (short)vp8_dc_uv_quant(Q, pc->uvdc_delta_q);

        pc->Y1dequant[Q][1] = (short)vp8_ac_yquant(Q);
        pc->Y2dequant[Q][1] = (short)vp8_ac2quant(Q, pc->y2ac_delta_q);
        pc->UVdequant[Q][1] = (short)vp8_ac_uv_quant(Q, pc->uvac_delta_q);
    }
}

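/* Select the dequantization constants for one macroblock, taking the
 * per-segment quantizer (absolute or delta-coded) into account when
 * segmentation is enabled.
 */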
void vp8_mb_init_dequantizer(VP8D_COMP *pbi, MACROBLOCKD *xd)
{
    int i;
    int QIndex;
    MB_MODE_INFO *mbmi = &xd->mode_info_context->mbmi;
    VP8_COMMON *const pc = & pbi->common;

    /* Decide whether to use the default or alternate baseline Q value. */
    if (xd->segmentation_enabled)
    {
        /* Abs Value */
        if (xd->mb_segement_abs_delta == SEGMENT_ABSDATA)
            QIndex = xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];

        /* Delta Value */
        else
        {
            QIndex = pc->base_qindex + xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];
            QIndex = (QIndex >= 0) ? ((QIndex <= MAXQ) ? QIndex : MAXQ) : 0; /* Clamp to valid range */
        }
    }
    else
        QIndex = pc->base_qindex;

    /* Set up the macroblock dequant constants */
    xd->dequant_y1_dc[0] = 1;
    xd->dequant_y1[0] = pc->Y1dequant[QIndex][0];
    xd->dequant_y2[0] = pc->Y2dequant[QIndex][0];
    xd->dequant_uv[0] = pc->UVdequant[QIndex][0];

    for (i = 1; i < 16; i++)
    {
        xd->dequant_y1_dc[i] =
        xd->dequant_y1[i] = pc->Y1dequant[QIndex][1];
        xd->dequant_y2[i] = pc->Y2dequant[QIndex][1];
        xd->dequant_uv[i] = pc->UVdequant[QIndex][1];
    }
}

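/* Reconstruct a single macroblock: decode (or skip) its residual tokens,
 * form the intra or inter prediction, then apply dequantization and the
 * inverse transforms to produce the final pixels.
 */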
static void decode_macroblock(VP8D_COMP *pbi, MACROBLOCKD *xd,
                              unsigned int mb_idx)
{
    MB_PREDICTION_MODE mode;
    int i;
#if CONFIG_ERROR_CONCEALMENT
    int corruption_detected = 0;
#endif
    (void)mb_idx;
    if (xd->mode_info_context->mbmi.mb_skip_coeff)
    {
        vp8_reset_mb_tokens_context(xd);
    }
    else if (!vp8dx_bool_error(xd->current_bc))
    {
        int eobtotal;
        eobtotal = vp8_decode_mb_tokens(pbi, xd);

        /* Special case: Force the loopfilter to skip when eobtotal is zero */
        xd->mode_info_context->mbmi.mb_skip_coeff = (eobtotal==0);
    }

    mode = xd->mode_info_context->mbmi.mode;

    if (xd->segmentation_enabled)
        vp8_mb_init_dequantizer(pbi, xd);


#if CONFIG_ERROR_CONCEALMENT

    if(pbi->ec_active)
    {
        int throw_residual;
        /* When we have independent partitions we can apply residual even
         * though other partitions within the frame are corrupt.
         */
        throw_residual = (!pbi->independent_partitions &&
                          pbi->frame_corrupt_residual);
        throw_residual = (throw_residual || vp8dx_bool_error(xd->current_bc));

        if ((mb_idx >= pbi->mvs_corrupt_from_mb || throw_residual))
        {
            /* MB with corrupt residuals or corrupt mode/motion vectors.
             * Better to use the predictor as reconstruction.
             */
            pbi->frame_corrupt_residual = 1;
            vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));
            vp8_conceal_corrupt_mb(xd);


            corruption_detected = 1;

            /* force idct to be skipped for B_PRED and use the
             * prediction only for reconstruction
             */
            vpx_memset(xd->eobs, 0, 25);
        }
    }
#endif

    /* do prediction */
    if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME)
    {
        vp8_build_intra_predictors_mbuv_s(xd,
                                          xd->recon_above[1],
                                          xd->recon_above[2],
                                          xd->recon_left[1],
                                          xd->recon_left[2],
                                          xd->recon_left_stride[1],
                                          xd->dst.u_buffer, xd->dst.v_buffer,
                                          xd->dst.uv_stride);

        if (mode != B_PRED)
        {
            vp8_build_intra_predictors_mby_s(xd,
                                             xd->recon_above[0],
                                             xd->recon_left[0],
                                             xd->recon_left_stride[0],
                                             xd->dst.y_buffer,
                                             xd->dst.y_stride);
        }
        else
        {
            short *DQC = xd->dequant_y1;
            int dst_stride = xd->dst.y_stride;

            /* clear out residual eob info */
            if(xd->mode_info_context->mbmi.mb_skip_coeff)
                vpx_memset(xd->eobs, 0, 25);

            intra_prediction_down_copy(xd, xd->recon_above[0] + 16);

            for (i = 0; i < 16; i++)
            {
                BLOCKD *b = &xd->block[i];
                unsigned char *dst = xd->dst.y_buffer + b->offset;
                B_PREDICTION_MODE b_mode =
                    xd->mode_info_context->bmi[i].as_mode;
                unsigned char *Above = dst - dst_stride;
                unsigned char *yleft = dst - 1;
                int left_stride = dst_stride;
                unsigned char top_left = Above[-1];

                vp8_intra4x4_predict(Above, yleft, left_stride, b_mode,
                                     dst, dst_stride, top_left);

                if (xd->eobs[i])
                {
                    if (xd->eobs[i] > 1)
                    {
                        vp8_dequant_idct_add(b->qcoeff, DQC, dst, dst_stride);
                    }
                    else
                    {
                        vp8_dc_only_idct_add
                            (b->qcoeff[0] * DQC[0],
                             dst, dst_stride,
                             dst, dst_stride);
                        vpx_memset(b->qcoeff, 0, 2 * sizeof(b->qcoeff[0]));
                    }
                }
            }
        }
    }
    else
    {
        vp8_build_inter_predictors_mb(xd);
    }


#if CONFIG_ERROR_CONCEALMENT
    if (corruption_detected)
    {
        return;
    }
#endif

    if(!xd->mode_info_context->mbmi.mb_skip_coeff)
    {
        /* dequantization and idct */
        if (mode != B_PRED)
        {
            short *DQC = xd->dequant_y1;

            if (mode != SPLITMV)
            {
                BLOCKD *b = &xd->block[24];

                /* do 2nd order transform on the dc block */
                if (xd->eobs[24] > 1)
                {
                    vp8_dequantize_b(b, xd->dequant_y2);

                    vp8_short_inv_walsh4x4(&b->dqcoeff[0],
                                           xd->qcoeff);
                    vpx_memset(b->qcoeff, 0, 16 * sizeof(b->qcoeff[0]));
                }
                else
                {
                    b->dqcoeff[0] = b->qcoeff[0] * xd->dequant_y2[0];
                    vp8_short_inv_walsh4x4_1(&b->dqcoeff[0],
                                             xd->qcoeff);
                    vpx_memset(b->qcoeff, 0, 2 * sizeof(b->qcoeff[0]));
                }

                /* override the dc dequant constant in order to preserve the
                 * dc components
                 */
                DQC = xd->dequant_y1_dc;
            }

            vp8_dequant_idct_add_y_block
                (xd->qcoeff, DQC,
                 xd->dst.y_buffer,
                 xd->dst.y_stride, xd->eobs);
        }

        vp8_dequant_idct_add_uv_block
            (xd->qcoeff+16*16, xd->dequant_uv,
             xd->dst.u_buffer, xd->dst.v_buffer,
             xd->dst.uv_stride, xd->eobs+16);
    }
}

static int get_delta_q(vp8_reader *bc, int prev, int *q_update)
{
    int ret_val = 0;

    if (vp8_read_bit(bc))
    {
        ret_val = vp8_read_literal(bc, 4);

        if (vp8_read_bit(bc))
            ret_val = -ret_val;
    }

    /* Trigger a quantizer update if the delta-q value has changed */
    if (ret_val != prev)
        *q_update = 1;

    return ret_val;
}

#ifdef PACKET_TESTING
#include <stdio.h>
FILE *vpxlog = 0;
#endif

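/* The three yv12_extend_frame_* helpers below replicate the outermost frame
 * rows and columns into the border area of the reconstruction buffer so that
 * later inter prediction can safely read outside the visible frame.
 */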
static void yv12_extend_frame_top_c(YV12_BUFFER_CONFIG *ybf)
{
    int i;
    unsigned char *src_ptr1;
    unsigned char *dest_ptr1;

    unsigned int Border;
    int plane_stride;

    /***********/
    /* Y Plane */
    /***********/
    Border = ybf->border;
    plane_stride = ybf->y_stride;
    src_ptr1 = ybf->y_buffer - Border;
    dest_ptr1 = src_ptr1 - (Border * plane_stride);

    for (i = 0; i < (int)Border; i++)
    {
        vpx_memcpy(dest_ptr1, src_ptr1, plane_stride);
        dest_ptr1 += plane_stride;
    }


    /***********/
    /* U Plane */
    /***********/
    plane_stride = ybf->uv_stride;
    Border /= 2;
    src_ptr1 = ybf->u_buffer - Border;
    dest_ptr1 = src_ptr1 - (Border * plane_stride);

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr1, src_ptr1, plane_stride);
        dest_ptr1 += plane_stride;
    }

    /***********/
    /* V Plane */
    /***********/

    src_ptr1 = ybf->v_buffer - Border;
    dest_ptr1 = src_ptr1 - (Border * plane_stride);

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr1, src_ptr1, plane_stride);
        dest_ptr1 += plane_stride;
    }
}

static void yv12_extend_frame_bottom_c(YV12_BUFFER_CONFIG *ybf)
{
    int i;
    unsigned char *src_ptr1, *src_ptr2;
    unsigned char *dest_ptr2;

    unsigned int Border;
    int plane_stride;
    int plane_height;

    /***********/
    /* Y Plane */
    /***********/
    Border = ybf->border;
    plane_stride = ybf->y_stride;
    plane_height = ybf->y_height;

    src_ptr1 = ybf->y_buffer - Border;
    src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
    dest_ptr2 = src_ptr2 + plane_stride;

    for (i = 0; i < (int)Border; i++)
    {
        vpx_memcpy(dest_ptr2, src_ptr2, plane_stride);
        dest_ptr2 += plane_stride;
    }


    /***********/
    /* U Plane */
    /***********/
    plane_stride = ybf->uv_stride;
    plane_height = ybf->uv_height;
    Border /= 2;

    src_ptr1 = ybf->u_buffer - Border;
    src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
    dest_ptr2 = src_ptr2 + plane_stride;

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr2, src_ptr2, plane_stride);
        dest_ptr2 += plane_stride;
    }

    /***********/
    /* V Plane */
    /***********/

    src_ptr1 = ybf->v_buffer - Border;
    src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
    dest_ptr2 = src_ptr2 + plane_stride;

    for (i = 0; i < (int)(Border); i++)
    {
        vpx_memcpy(dest_ptr2, src_ptr2, plane_stride);
        dest_ptr2 += plane_stride;
    }
}

static void yv12_extend_frame_left_right_c(YV12_BUFFER_CONFIG *ybf,
                                           unsigned char *y_src,
                                           unsigned char *u_src,
                                           unsigned char *v_src)
{
    int i;
    unsigned char *src_ptr1, *src_ptr2;
    unsigned char *dest_ptr1, *dest_ptr2;

    unsigned int Border;
    int plane_stride;
    int plane_height;
    int plane_width;

    /***********/
    /* Y Plane */
    /***********/
    Border = ybf->border;
    plane_stride = ybf->y_stride;
    plane_height = 16;
    plane_width = ybf->y_width;

    /* copy the left and right most columns out */
    src_ptr1 = y_src;
    src_ptr2 = src_ptr1 + plane_width - 1;
    dest_ptr1 = src_ptr1 - Border;
    dest_ptr2 = src_ptr2 + 1;

    for (i = 0; i < plane_height; i++)
    {
        vpx_memset(dest_ptr1, src_ptr1[0], Border);
        vpx_memset(dest_ptr2, src_ptr2[0], Border);
        src_ptr1 += plane_stride;
        src_ptr2 += plane_stride;
        dest_ptr1 += plane_stride;
        dest_ptr2 += plane_stride;
    }

    /***********/
    /* U Plane */
    /***********/
    plane_stride = ybf->uv_stride;
    plane_height = 8;
    plane_width = ybf->uv_width;
    Border /= 2;

    /* copy the left and right most columns out */
    src_ptr1 = u_src;
    src_ptr2 = src_ptr1 + plane_width - 1;
    dest_ptr1 = src_ptr1 - Border;
    dest_ptr2 = src_ptr2 + 1;

    for (i = 0; i < plane_height; i++)
    {
        vpx_memset(dest_ptr1, src_ptr1[0], Border);
        vpx_memset(dest_ptr2, src_ptr2[0], Border);
        src_ptr1 += plane_stride;
        src_ptr2 += plane_stride;
        dest_ptr1 += plane_stride;
        dest_ptr2 += plane_stride;
    }

    /***********/
    /* V Plane */
    /***********/

    /* copy the left and right most columns out */
    src_ptr1 = v_src;
    src_ptr2 = src_ptr1 + plane_width - 1;
    dest_ptr1 = src_ptr1 - Border;
    dest_ptr2 = src_ptr2 + 1;

    for (i = 0; i < plane_height; i++)
    {
        vpx_memset(dest_ptr1, src_ptr1[0], Border);
        vpx_memset(dest_ptr2, src_ptr2[0], Border);
        src_ptr1 += plane_stride;
        src_ptr2 += plane_stride;
        dest_ptr1 += plane_stride;
        dest_ptr2 += plane_stride;
    }
}

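/* Single-threaded reconstruction: walk the frame one macroblock row at a
 * time, decoding each macroblock, while loop filtering and border extension
 * trail one or two rows behind the decode position.
 */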
static void decode_mb_rows(VP8D_COMP *pbi)
{
    VP8_COMMON *const pc = & pbi->common;
    MACROBLOCKD *const xd = & pbi->mb;

    MODE_INFO *lf_mic = xd->mode_info_context;

    int ibc = 0;
    int num_part = 1 << pc->multi_token_partition;

    int recon_yoffset, recon_uvoffset;
    int mb_row, mb_col;
    int mb_idx = 0;

    YV12_BUFFER_CONFIG *yv12_fb_new = pbi->dec_fb_ref[INTRA_FRAME];

    int recon_y_stride = yv12_fb_new->y_stride;
    int recon_uv_stride = yv12_fb_new->uv_stride;

    unsigned char *ref_buffer[MAX_REF_FRAMES][3];
    unsigned char *dst_buffer[3];
    unsigned char *lf_dst[3];
    unsigned char *eb_dst[3];
    int i;
    int ref_fb_corrupted[MAX_REF_FRAMES];

    ref_fb_corrupted[INTRA_FRAME] = 0;

    for(i = 1; i < MAX_REF_FRAMES; i++)
    {
        YV12_BUFFER_CONFIG *this_fb = pbi->dec_fb_ref[i];

        ref_buffer[i][0] = this_fb->y_buffer;
        ref_buffer[i][1] = this_fb->u_buffer;
        ref_buffer[i][2] = this_fb->v_buffer;

        ref_fb_corrupted[i] = this_fb->corrupted;
    }

    /* Set up the buffer pointers */
    eb_dst[0] = lf_dst[0] = dst_buffer[0] = yv12_fb_new->y_buffer;
    eb_dst[1] = lf_dst[1] = dst_buffer[1] = yv12_fb_new->u_buffer;
    eb_dst[2] = lf_dst[2] = dst_buffer[2] = yv12_fb_new->v_buffer;

    xd->up_available = 0;

    /* Initialize the loop filter for this frame. */
    if(pc->filter_level)
        vp8_loop_filter_frame_init(pc, xd, pc->filter_level);

    vp8_setup_intra_recon_top_line(yv12_fb_new);

    /* Decode the individual macro block */
    for (mb_row = 0; mb_row < pc->mb_rows; mb_row++)
    {
        if (num_part > 1)
        {
            xd->current_bc = & pbi->mbc[ibc];
            ibc++;

            if (ibc == num_part)
                ibc = 0;
        }

        recon_yoffset = mb_row * recon_y_stride * 16;
        recon_uvoffset = mb_row * recon_uv_stride * 8;

        /* reset contexts */
        xd->above_context = pc->above_context;
        vpx_memset(xd->left_context, 0, sizeof(ENTROPY_CONTEXT_PLANES));

        xd->left_available = 0;

        xd->mb_to_top_edge = -((mb_row * 16) << 3);
        xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;

        xd->recon_above[0] = dst_buffer[0] + recon_yoffset;
        xd->recon_above[1] = dst_buffer[1] + recon_uvoffset;
        xd->recon_above[2] = dst_buffer[2] + recon_uvoffset;

        xd->recon_left[0] = xd->recon_above[0] - 1;
        xd->recon_left[1] = xd->recon_above[1] - 1;
        xd->recon_left[2] = xd->recon_above[2] - 1;

        xd->recon_above[0] -= xd->dst.y_stride;
        xd->recon_above[1] -= xd->dst.uv_stride;
        xd->recon_above[2] -= xd->dst.uv_stride;

        /* TODO: move to outside row loop */
        xd->recon_left_stride[0] = xd->dst.y_stride;
        xd->recon_left_stride[1] = xd->dst.uv_stride;

        setup_intra_recon_left(xd->recon_left[0], xd->recon_left[1],
                               xd->recon_left[2], xd->dst.y_stride,
                               xd->dst.uv_stride);

        for (mb_col = 0; mb_col < pc->mb_cols; mb_col++)
        {
            /* Distance of Mb to the various image edges.
             * These are specified to 8th pel as they are always compared to values
             * that are in 1/8th pel units
             */
            xd->mb_to_left_edge = -((mb_col * 16) << 3);
            xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;

#if CONFIG_ERROR_CONCEALMENT
            {
                int corrupt_residual = (!pbi->independent_partitions &&
                                        pbi->frame_corrupt_residual) ||
                                       vp8dx_bool_error(xd->current_bc);
                if (pbi->ec_active &&
                    xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME &&
                    corrupt_residual)
                {
                    /* We have an intra block with corrupt coefficients, better to
                     * conceal with an inter block. Interpolate MVs from neighboring
                     * MBs.
                     *
                     * Note that for the first mb with corrupt residual in a frame,
                     * we might not discover that before decoding the residual. That
                     * happens after this check, and therefore no inter concealment
                     * will be done.
                     */
                    vp8_interpolate_motion(xd,
                                           mb_row, mb_col,
                                           pc->mb_rows, pc->mb_cols,
                                           pc->mode_info_stride);
                }
            }
#endif

            xd->dst.y_buffer = dst_buffer[0] + recon_yoffset;
            xd->dst.u_buffer = dst_buffer[1] + recon_uvoffset;
            xd->dst.v_buffer = dst_buffer[2] + recon_uvoffset;

            xd->pre.y_buffer = ref_buffer[xd->mode_info_context->mbmi.ref_frame][0] + recon_yoffset;
            xd->pre.u_buffer = ref_buffer[xd->mode_info_context->mbmi.ref_frame][1] + recon_uvoffset;
            xd->pre.v_buffer = ref_buffer[xd->mode_info_context->mbmi.ref_frame][2] + recon_uvoffset;

            /* propagate errors from reference frames */
            xd->corrupted |= ref_fb_corrupted[xd->mode_info_context->mbmi.ref_frame];

            decode_macroblock(pbi, xd, mb_idx);

            mb_idx++;
            xd->left_available = 1;

            /* check if the boolean decoder has suffered an error */
            xd->corrupted |= vp8dx_bool_error(xd->current_bc);

            xd->recon_above[0] += 16;
            xd->recon_above[1] += 8;
            xd->recon_above[2] += 8;
            xd->recon_left[0] += 16;
            xd->recon_left[1] += 8;
            xd->recon_left[2] += 8;

            recon_yoffset += 16;
            recon_uvoffset += 8;

            ++xd->mode_info_context; /* next mb */

            xd->above_context++;
        }

        /* adjust to the next row of mbs */
        vp8_extend_mb_row(yv12_fb_new, xd->dst.y_buffer + 16,
                          xd->dst.u_buffer + 8, xd->dst.v_buffer + 8);

        ++xd->mode_info_context; /* skip prediction column */
        xd->up_available = 1;

        if(pc->filter_level)
        {
            if(mb_row > 0)
            {
                if (pc->filter_type == NORMAL_LOOPFILTER)
                    vp8_loop_filter_row_normal(pc, lf_mic, mb_row-1,
                                               recon_y_stride, recon_uv_stride,
                                               lf_dst[0], lf_dst[1], lf_dst[2]);
                else
                    vp8_loop_filter_row_simple(pc, lf_mic, mb_row-1,
                                               recon_y_stride, recon_uv_stride,
                                               lf_dst[0], lf_dst[1], lf_dst[2]);
                if(mb_row > 1)
                {
                    yv12_extend_frame_left_right_c(yv12_fb_new,
                                                   eb_dst[0],
                                                   eb_dst[1],
                                                   eb_dst[2]);

                    eb_dst[0] += recon_y_stride * 16;
                    eb_dst[1] += recon_uv_stride * 8;
                    eb_dst[2] += recon_uv_stride * 8;
                }

                lf_dst[0] += recon_y_stride * 16;
                lf_dst[1] += recon_uv_stride * 8;
                lf_dst[2] += recon_uv_stride * 8;
                lf_mic += pc->mb_cols;
                lf_mic++; /* Skip border mb */
            }
        }
        else
        {
            if(mb_row > 0)
            {
                /**/
                yv12_extend_frame_left_right_c(yv12_fb_new,
                                               eb_dst[0],
                                               eb_dst[1],
                                               eb_dst[2]);
                eb_dst[0] += recon_y_stride * 16;
                eb_dst[1] += recon_uv_stride * 8;
                eb_dst[2] += recon_uv_stride * 8;
            }
        }
    }

    if(pc->filter_level)
    {
        if (pc->filter_type == NORMAL_LOOPFILTER)
            vp8_loop_filter_row_normal(pc, lf_mic, mb_row-1, recon_y_stride,
                                       recon_uv_stride, lf_dst[0], lf_dst[1],
                                       lf_dst[2]);
        else
            vp8_loop_filter_row_simple(pc, lf_mic, mb_row-1, recon_y_stride,
                                       recon_uv_stride, lf_dst[0], lf_dst[1],
                                       lf_dst[2]);

        yv12_extend_frame_left_right_c(yv12_fb_new,
                                       eb_dst[0],
                                       eb_dst[1],
                                       eb_dst[2]);
        eb_dst[0] += recon_y_stride * 16;
        eb_dst[1] += recon_uv_stride * 8;
        eb_dst[2] += recon_uv_stride * 8;
    }
    yv12_extend_frame_left_right_c(yv12_fb_new,
                                   eb_dst[0],
                                   eb_dst[1],
                                   eb_dst[2]);
    yv12_extend_frame_top_c(yv12_fb_new);
    yv12_extend_frame_bottom_c(yv12_fb_new);

}

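/* Token partition sizes are stored in the first partition as 3-byte
 * little-endian values; the helpers below read and validate them, falling
 * back to the remaining buffer size when error concealment is active.
 */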
static unsigned int read_partition_size(VP8D_COMP *pbi,
                                        const unsigned char *cx_size)
{
    unsigned char temp[3];
    if (pbi->decrypt_cb)
    {
        pbi->decrypt_cb(pbi->decrypt_state, cx_size, temp, 3);
        cx_size = temp;
    }
    return cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
}

static int read_is_valid(const unsigned char *start,
                         size_t len,
                         const unsigned char *end)
{
    return (start + len > start && start + len <= end);
}

static unsigned int read_available_partition_size(
                                       VP8D_COMP *pbi,
                                       const unsigned char *token_part_sizes,
                                       const unsigned char *fragment_start,
                                       const unsigned char *first_fragment_end,
                                       const unsigned char *fragment_end,
                                       int i,
                                       int num_part)
{
    VP8_COMMON* pc = &pbi->common;
    const unsigned char *partition_size_ptr = token_part_sizes + i * 3;
    unsigned int partition_size = 0;
    ptrdiff_t bytes_left = fragment_end - fragment_start;
    /* Calculate the length of this partition. The last partition
     * size is implicit. If the partition size can't be read, then
     * either use the remaining data in the buffer (for EC mode)
     * or throw an error.
     */
    if (i < num_part - 1)
    {
        if (read_is_valid(partition_size_ptr, 3, first_fragment_end))
            partition_size = read_partition_size(pbi, partition_size_ptr);
        else if (pbi->ec_active)
            partition_size = (unsigned int)bytes_left;
        else
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated partition size data");
    }
    else
        partition_size = (unsigned int)bytes_left;

    /* Validate the calculated partition length. If the buffer
     * described by the partition can't be fully read, then restrict
     * it to the portion that can be (for EC mode) or throw an error.
     */
    if (!read_is_valid(fragment_start, partition_size, fragment_end))
    {
        if (pbi->ec_active)
            partition_size = (unsigned int)bytes_left;
        else
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet or corrupt partition "
                               "%d length", i + 1);
    }
    return partition_size;
}


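/* Read the token partition count from the frame header, split the incoming
 * fragments so each entry points at one partition, and start a boolean
 * decoder on every token partition.
 */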
static void setup_token_decoder(VP8D_COMP *pbi,
                                const unsigned char* token_part_sizes)
{
    vp8_reader *bool_decoder = &pbi->mbc[0];
    unsigned int partition_idx;
    unsigned int fragment_idx;
    unsigned int num_token_partitions;
    const unsigned char *first_fragment_end = pbi->fragments.ptrs[0] +
                                              pbi->fragments.sizes[0];

    TOKEN_PARTITION multi_token_partition =
            (TOKEN_PARTITION)vp8_read_literal(&pbi->mbc[8], 2);
    if (!vp8dx_bool_error(&pbi->mbc[8]))
        pbi->common.multi_token_partition = multi_token_partition;
    num_token_partitions = 1 << pbi->common.multi_token_partition;

    /* Check for partitions within the fragments and unpack the fragments
     * so that each fragment pointer points to its corresponding partition. */
    for (fragment_idx = 0; fragment_idx < pbi->fragments.count; ++fragment_idx)
    {
        unsigned int fragment_size = pbi->fragments.sizes[fragment_idx];
        const unsigned char *fragment_end = pbi->fragments.ptrs[fragment_idx] +
                                            fragment_size;
        /* Special case for handling the first partition since we have already
         * read its size. */
        if (fragment_idx == 0)
        {
            /* Size of first partition + token partition sizes element */
            ptrdiff_t ext_first_part_size = token_part_sizes -
                pbi->fragments.ptrs[0] + 3 * (num_token_partitions - 1);
            fragment_size -= (unsigned int)ext_first_part_size;
            if (fragment_size > 0)
            {
                pbi->fragments.sizes[0] = (unsigned int)ext_first_part_size;
                /* The fragment contains an additional partition. Move to
                 * next. */
                fragment_idx++;
                pbi->fragments.ptrs[fragment_idx] = pbi->fragments.ptrs[0] +
                    pbi->fragments.sizes[0];
            }
        }
        /* Split the chunk into partitions read from the bitstream */
        while (fragment_size > 0)
        {
            ptrdiff_t partition_size = read_available_partition_size(
                                                 pbi,
                                                 token_part_sizes,
                                                 pbi->fragments.ptrs[fragment_idx],
                                                 first_fragment_end,
                                                 fragment_end,
                                                 fragment_idx - 1,
                                                 num_token_partitions);
            pbi->fragments.sizes[fragment_idx] = (unsigned int)partition_size;
            fragment_size -= (unsigned int)partition_size;
            assert(fragment_idx <= num_token_partitions);
            if (fragment_size > 0)
            {
                /* The fragment contains an additional partition.
                 * Move to next. */
                fragment_idx++;
                pbi->fragments.ptrs[fragment_idx] =
                    pbi->fragments.ptrs[fragment_idx - 1] + partition_size;
            }
        }
    }

    pbi->fragments.count = num_token_partitions + 1;

    for (partition_idx = 1; partition_idx < pbi->fragments.count; ++partition_idx)
    {
        if (vp8dx_start_decode(bool_decoder,
                               pbi->fragments.ptrs[partition_idx],
                               pbi->fragments.sizes[partition_idx],
                               pbi->decrypt_cb, pbi->decrypt_state))
            vpx_internal_error(&pbi->common.error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate bool decoder %d",
                               partition_idx);

        bool_decoder++;
    }

#if CONFIG_MULTITHREAD
    /* Clamp number of decoder threads */
    if (pbi->decoding_thread_count > num_token_partitions - 1)
        pbi->decoding_thread_count = num_token_partitions - 1;
#endif
}


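/* Reset per-frame decoder state: restore default probabilities and feature
 * data on key frames, and select the sub-pixel interpolation filters on
 * inter frames.
 */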
static void init_frame(VP8D_COMP *pbi)
{
    VP8_COMMON *const pc = & pbi->common;
    MACROBLOCKD *const xd = & pbi->mb;

    if (pc->frame_type == KEY_FRAME)
    {
        /* Various keyframe initializations */
        vpx_memcpy(pc->fc.mvc, vp8_default_mv_context, sizeof(vp8_default_mv_context));

        vp8_init_mbmode_probs(pc);

        vp8_default_coef_probs(pc);

        /* reset the segment feature data to 0 with delta coding (Default state). */
        vpx_memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));
        xd->mb_segement_abs_delta = SEGMENT_DELTADATA;

        /* reset the mode ref deltas for loop filter */
        vpx_memset(xd->ref_lf_deltas, 0, sizeof(xd->ref_lf_deltas));
        vpx_memset(xd->mode_lf_deltas, 0, sizeof(xd->mode_lf_deltas));

        /* All buffers are implicitly updated on key frames. */
        pc->refresh_golden_frame = 1;
        pc->refresh_alt_ref_frame = 1;
        pc->copy_buffer_to_gf = 0;
        pc->copy_buffer_to_arf = 0;

        /* Note that Golden and Altref modes cannot be used on a key frame so
         * ref_frame_sign_bias[] is undefined and meaningless
         */
        pc->ref_frame_sign_bias[GOLDEN_FRAME] = 0;
        pc->ref_frame_sign_bias[ALTREF_FRAME] = 0;
    }
    else
    {
        /* To enable choice of different interpolation filters */
        if (!pc->use_bilinear_mc_filter)
        {
            xd->subpixel_predict = vp8_sixtap_predict4x4;
            xd->subpixel_predict8x4 = vp8_sixtap_predict8x4;
            xd->subpixel_predict8x8 = vp8_sixtap_predict8x8;
            xd->subpixel_predict16x16 = vp8_sixtap_predict16x16;
        }
        else
        {
            xd->subpixel_predict = vp8_bilinear_predict4x4;
            xd->subpixel_predict8x4 = vp8_bilinear_predict8x4;
            xd->subpixel_predict8x8 = vp8_bilinear_predict8x8;
            xd->subpixel_predict16x16 = vp8_bilinear_predict16x16;
        }

        if (pbi->decoded_key_frame && pbi->ec_enabled && !pbi->ec_active)
            pbi->ec_active = 1;
    }

    xd->left_context = &pc->left_context;
    xd->mode_info_context = pc->mi;
    xd->frame_type = pc->frame_type;
    xd->mode_info_context->mbmi.mode = DC_PRED;
    xd->mode_info_stride = pc->mode_info_stride;
    xd->corrupted = 0; /* init without corruption */

    xd->fullpixel_mask = 0xffffffff;
    if(pc->full_pixel)
        xd->fullpixel_mask = 0xfffffff8;

}

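/* Top-level frame decode: parse the uncompressed frame header and the first
 * (mode/mv) partition, set up the token partitions, decode the macroblock
 * modes and motion vectors, then reconstruct all macroblock rows and record
 * any corruption that was detected along the way.
 */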
int vp8_decode_frame(VP8D_COMP *pbi)
{
    vp8_reader *const bc = &pbi->mbc[8];
    VP8_COMMON *const pc = &pbi->common;
    MACROBLOCKD *const xd = &pbi->mb;
    const unsigned char *data = pbi->fragments.ptrs[0];
    const unsigned char *data_end = data + pbi->fragments.sizes[0];
    ptrdiff_t first_partition_length_in_bytes;

    int i, j, k, l;
    const int *const mb_feature_data_bits = vp8_mb_feature_data_bits;
    int corrupt_tokens = 0;
    int prev_independent_partitions = pbi->independent_partitions;

    YV12_BUFFER_CONFIG *yv12_fb_new = pbi->dec_fb_ref[INTRA_FRAME];

    /* start with no corruption of current frame */
    xd->corrupted = 0;
    yv12_fb_new->corrupted = 0;

    if (data_end - data < 3)
    {
        if (!pbi->ec_active)
        {
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet");
        }

        /* Declare the missing frame as an inter frame since it will
           be handled as an inter frame when we have estimated its
           motion vectors. */
        pc->frame_type = INTER_FRAME;
        pc->version = 0;
        pc->show_frame = 1;
        first_partition_length_in_bytes = 0;
    }
    else
    {
        unsigned char clear_buffer[10];
        const unsigned char *clear = data;
        if (pbi->decrypt_cb)
        {
            int n = (int)(data_end - data);
            if (n > 10) n = 10;
            pbi->decrypt_cb(pbi->decrypt_state, data, clear_buffer, n);
            clear = clear_buffer;
        }

        pc->frame_type = (FRAME_TYPE)(clear[0] & 1);
        pc->version = (clear[0] >> 1) & 7;
        pc->show_frame = (clear[0] >> 4) & 1;
        first_partition_length_in_bytes =
            (clear[0] | (clear[1] << 8) | (clear[2] << 16)) >> 5;

        if (!pbi->ec_active &&
            (data + first_partition_length_in_bytes > data_end
            || data + first_partition_length_in_bytes < data))
            vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                               "Truncated packet or corrupt partition 0 length");

        data += 3;
        clear += 3;

        vp8_setup_version(pc);


        if (pc->frame_type == KEY_FRAME)
        {
            /* vet via sync code */
            /* When error concealment is enabled we should only check the sync
             * code if we have enough bits available
             */
            if (!pbi->ec_active || data + 3 < data_end)
            {
                if (clear[0] != 0x9d || clear[1] != 0x01 || clear[2] != 0x2a)
                    vpx_internal_error(&pc->error, VPX_CODEC_UNSUP_BITSTREAM,
                                       "Invalid frame sync code");
            }

            /* If error concealment is enabled we should only parse the new size
             * if we have enough data. Otherwise we will end up with the wrong
             * size.
             */
            if (!pbi->ec_active || data + 6 < data_end)
            {
                pc->Width = (clear[3] | (clear[4] << 8)) & 0x3fff;
                pc->horiz_scale = clear[4] >> 6;
                pc->Height = (clear[5] | (clear[6] << 8)) & 0x3fff;
                pc->vert_scale = clear[6] >> 6;
            }
            data += 7;
            clear += 7;
        }
        else
        {
            vpx_memcpy(&xd->pre, yv12_fb_new, sizeof(YV12_BUFFER_CONFIG));
            vpx_memcpy(&xd->dst, yv12_fb_new, sizeof(YV12_BUFFER_CONFIG));
        }
    }
    if ((!pbi->decoded_key_frame && pc->frame_type != KEY_FRAME))
    {
        return -1;
    }

    init_frame(pbi);

    if (vp8dx_start_decode(bc, data, (unsigned int)(data_end - data),
                           pbi->decrypt_cb, pbi->decrypt_state))
        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate bool decoder 0");
    if (pc->frame_type == KEY_FRAME) {
        (void)vp8_read_bit(bc);  // colorspace
        pc->clamp_type = (CLAMP_TYPE)vp8_read_bit(bc);
    }

    /* Is segmentation enabled */
    xd->segmentation_enabled = (unsigned char)vp8_read_bit(bc);

    if (xd->segmentation_enabled)
    {
        /* Signal whether or not the segmentation map is being explicitly updated this frame. */
        xd->update_mb_segmentation_map = (unsigned char)vp8_read_bit(bc);
        xd->update_mb_segmentation_data = (unsigned char)vp8_read_bit(bc);

        if (xd->update_mb_segmentation_data)
        {
            xd->mb_segement_abs_delta = (unsigned char)vp8_read_bit(bc);

            vpx_memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));

            /* For each segmentation feature (Quant and loop filter level) */
            for (i = 0; i < MB_LVL_MAX; i++)
            {
                for (j = 0; j < MAX_MB_SEGMENTS; j++)
                {
                    /* Frame level data */
                    if (vp8_read_bit(bc))
                    {
                        xd->segment_feature_data[i][j] = (signed char)vp8_read_literal(bc, mb_feature_data_bits[i]);

                        if (vp8_read_bit(bc))
                            xd->segment_feature_data[i][j] = -xd->segment_feature_data[i][j];
                    }
                    else
                        xd->segment_feature_data[i][j] = 0;
                }
            }
        }

        if (xd->update_mb_segmentation_map)
        {
            /* Which macro block level features are enabled */
            vpx_memset(xd->mb_segment_tree_probs, 255, sizeof(xd->mb_segment_tree_probs));

            /* Read the probs used to decode the segment id for each macro block. */
            for (i = 0; i < MB_FEATURE_TREE_PROBS; i++)
            {
                /* If not explicitly set value is defaulted to 255 by memset above */
                if (vp8_read_bit(bc))
                    xd->mb_segment_tree_probs[i] = (vp8_prob)vp8_read_literal(bc, 8);
            }
        }
    }
    else
    {
        /* No segmentation updates on this frame */
        xd->update_mb_segmentation_map = 0;
        xd->update_mb_segmentation_data = 0;
    }

    /* Read the loop filter level and type */
    pc->filter_type = (LOOPFILTERTYPE) vp8_read_bit(bc);
    pc->filter_level = vp8_read_literal(bc, 6);
    pc->sharpness_level = vp8_read_literal(bc, 3);

    /* Read in loop filter deltas applied at the MB level based on mode or ref frame. */
    xd->mode_ref_lf_delta_update = 0;
    xd->mode_ref_lf_delta_enabled = (unsigned char)vp8_read_bit(bc);

    if (xd->mode_ref_lf_delta_enabled)
    {
        /* Do the deltas need to be updated */
        xd->mode_ref_lf_delta_update = (unsigned char)vp8_read_bit(bc);

        if (xd->mode_ref_lf_delta_update)
        {
            /* Send update */
            for (i = 0; i < MAX_REF_LF_DELTAS; i++)
            {
                if (vp8_read_bit(bc))
                {
                    /*sign = vp8_read_bit( bc );*/
                    xd->ref_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

                    if (vp8_read_bit(bc)) /* Apply sign */
                        xd->ref_lf_deltas[i] = xd->ref_lf_deltas[i] * -1;
                }
            }

            /* Send update */
            for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
            {
                if (vp8_read_bit(bc))
                {
                    /*sign = vp8_read_bit( bc );*/
                    xd->mode_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

                    if (vp8_read_bit(bc)) /* Apply sign */
                        xd->mode_lf_deltas[i] = xd->mode_lf_deltas[i] * -1;
                }
            }
        }
    }

    setup_token_decoder(pbi, data + first_partition_length_in_bytes);

    xd->current_bc = &pbi->mbc[0];

    /* Read the default quantizers. */
    {
        int Q, q_update;

        Q = vp8_read_literal(bc, 7); /* AC 1st order Q = default */
        pc->base_qindex = Q;
        q_update = 0;
        pc->y1dc_delta_q = get_delta_q(bc, pc->y1dc_delta_q, &q_update);
        pc->y2dc_delta_q = get_delta_q(bc, pc->y2dc_delta_q, &q_update);
        pc->y2ac_delta_q = get_delta_q(bc, pc->y2ac_delta_q, &q_update);
        pc->uvdc_delta_q = get_delta_q(bc, pc->uvdc_delta_q, &q_update);
        pc->uvac_delta_q = get_delta_q(bc, pc->uvac_delta_q, &q_update);

        if (q_update)
            vp8cx_init_de_quantizer(pbi);

        /* MB level dequantizer setup */
        vp8_mb_init_dequantizer(pbi, &pbi->mb);
    }

    /* Determine if the golden frame or ARF buffer should be updated and how.
     * For all non key frames the GF and ARF refresh flags and sign bias
     * flags must be set explicitly.
     */
    if (pc->frame_type != KEY_FRAME)
    {
        /* Should the GF or ARF be updated from the current frame */
        pc->refresh_golden_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't refresh golden if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->refresh_golden_frame = 0;
#endif

        pc->refresh_alt_ref_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't refresh altref if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->refresh_alt_ref_frame = 0;
#endif

        /* Buffer to buffer copy flags. */
        pc->copy_buffer_to_gf = 0;

        if (!pc->refresh_golden_frame)
            pc->copy_buffer_to_gf = vp8_read_literal(bc, 2);

#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't copy to the golden if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->copy_buffer_to_gf = 0;
#endif

        pc->copy_buffer_to_arf = 0;

        if (!pc->refresh_alt_ref_frame)
            pc->copy_buffer_to_arf = vp8_read_literal(bc, 2);

#if CONFIG_ERROR_CONCEALMENT
        /* Assume we shouldn't copy to the alt-ref if the bit is missing */
        xd->corrupted |= vp8dx_bool_error(bc);
        if (pbi->ec_active && xd->corrupted)
            pc->copy_buffer_to_arf = 0;
#endif


        pc->ref_frame_sign_bias[GOLDEN_FRAME] = vp8_read_bit(bc);
        pc->ref_frame_sign_bias[ALTREF_FRAME] = vp8_read_bit(bc);
    }

    pc->refresh_entropy_probs = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
    /* Assume we shouldn't refresh the probabilities if the bit is
     * missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted)
        pc->refresh_entropy_probs = 0;
#endif
    if (pc->refresh_entropy_probs == 0)
    {
        vpx_memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
    }

    pc->refresh_last_frame = pc->frame_type == KEY_FRAME || vp8_read_bit(bc);

#if CONFIG_ERROR_CONCEALMENT
    /* Assume we should refresh the last frame if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted)
        pc->refresh_last_frame = 1;
#endif

    if (0)
    {
        FILE *z = fopen("decodestats.stt", "a");
        fprintf(z, "%6d F:%d,G:%d,A:%d,L:%d,Q:%d\n",
                pc->current_video_frame,
                pc->frame_type,
                pc->refresh_golden_frame,
                pc->refresh_alt_ref_frame,
                pc->refresh_last_frame,
                pc->base_qindex);
        fclose(z);
    }

    {
        pbi->independent_partitions = 1;

        /* read coef probability tree */
        for (i = 0; i < BLOCK_TYPES; i++)
            for (j = 0; j < COEF_BANDS; j++)
                for (k = 0; k < PREV_COEF_CONTEXTS; k++)
                    for (l = 0; l < ENTROPY_NODES; l++)
                    {
                        vp8_prob *const p = pc->fc.coef_probs [i][j][k] + l;

                        if (vp8_read(bc, vp8_coef_update_probs [i][j][k][l]))
                        {
                            *p = (vp8_prob)vp8_read_literal(bc, 8);
                        }
                        if (k > 0 && *p != pc->fc.coef_probs[i][j][k-1][l])
                            pbi->independent_partitions = 0;
                    }
    }

    /* clear out the coeff buffer */
    vpx_memset(xd->qcoeff, 0, sizeof(xd->qcoeff));

    vp8_decode_mode_mvs(pbi);

#if CONFIG_ERROR_CONCEALMENT
    if (pbi->ec_active &&
        pbi->mvs_corrupt_from_mb < (unsigned int)pc->mb_cols * pc->mb_rows)
    {
        /* Motion vectors are missing in this frame. We will try to estimate
         * them and then continue decoding the frame as usual */
        vp8_estimate_missing_mvs(pbi);
    }
#endif

    vpx_memset(pc->above_context, 0, sizeof(ENTROPY_CONTEXT_PLANES) * pc->mb_cols);
    pbi->frame_corrupt_residual = 0;

#if CONFIG_MULTITHREAD
    if (pbi->b_multithreaded_rd && pc->multi_token_partition != ONE_PARTITION)
    {
        unsigned int thread;
        vp8mt_decode_mb_rows(pbi, xd);
        vp8_yv12_extend_frame_borders(yv12_fb_new);
        for (thread = 0; thread < pbi->decoding_thread_count; ++thread)
            corrupt_tokens |= pbi->mb_row_di[thread].mbd.corrupted;
    }
    else
#endif
    {
        decode_mb_rows(pbi);
        corrupt_tokens |= xd->corrupted;
    }

    /* Collect information about decoder corruption. */
    /* 1. Check first boolean decoder for errors. */
    yv12_fb_new->corrupted = vp8dx_bool_error(bc);
    /* 2. Check the macroblock information */
    yv12_fb_new->corrupted |= corrupt_tokens;

    if (!pbi->decoded_key_frame)
    {
        if (pc->frame_type == KEY_FRAME &&
            !yv12_fb_new->corrupted)
            pbi->decoded_key_frame = 1;
        else
            vpx_internal_error(&pbi->common.error, VPX_CODEC_CORRUPT_FRAME,
                               "A stream must start with a complete key frame");
    }

    /* vpx_log("Decoder: Frame Decoded, Size Roughly:%d bytes \n",bc->pos+pbi->bc2.pos); */

    if (pc->refresh_entropy_probs == 0)
    {
        vpx_memcpy(&pc->fc, &pc->lfc, sizeof(pc->fc));
        pbi->independent_partitions = prev_independent_partitions;
    }

#ifdef PACKET_TESTING
    {
        FILE *f = fopen("decompressor.VP8", "ab");
        unsigned int size = pbi->bc2.pos + pbi->bc.pos + 8;
        fwrite((void *) &size, 4, 1, f);
        fwrite((void *) pbi->Source, size, 1, f);
        fclose(f);
    }
#endif

    return 0;
}