1 /*
2  * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 #include <math.h>
12 #include <stdio.h>
13 #include <limits.h>
14 
15 #include "./vp9_rtcd.h"
16 #include "./vpx_config.h"
17 #include "./vpx_dsp_rtcd.h"
18 #include "./vpx_scale_rtcd.h"
19 #include "vpx/internal/vpx_psnr.h"
20 #include "vpx_dsp/vpx_dsp_common.h"
21 #include "vpx_dsp/vpx_filter.h"
22 #if CONFIG_INTERNAL_STATS
23 #include "vpx_dsp/ssim.h"
24 #endif
25 #include "vpx_ports/mem.h"
26 #include "vpx_ports/system_state.h"
27 #include "vpx_ports/vpx_timer.h"
28 
29 #include "vp9/common/vp9_alloccommon.h"
30 #include "vp9/common/vp9_filter.h"
31 #include "vp9/common/vp9_idct.h"
32 #if CONFIG_VP9_POSTPROC
33 #include "vp9/common/vp9_postproc.h"
34 #endif
35 #include "vp9/common/vp9_reconinter.h"
36 #include "vp9/common/vp9_reconintra.h"
37 #include "vp9/common/vp9_tile_common.h"
38 
39 #include "vp9/encoder/vp9_aq_complexity.h"
40 #include "vp9/encoder/vp9_aq_cyclicrefresh.h"
41 #include "vp9/encoder/vp9_aq_variance.h"
42 #include "vp9/encoder/vp9_bitstream.h"
43 #include "vp9/encoder/vp9_context_tree.h"
44 #include "vp9/encoder/vp9_encodeframe.h"
45 #include "vp9/encoder/vp9_encodemv.h"
46 #include "vp9/encoder/vp9_encoder.h"
47 #include "vp9/encoder/vp9_ethread.h"
48 #include "vp9/encoder/vp9_firstpass.h"
49 #include "vp9/encoder/vp9_mbgraph.h"
50 #include "vp9/encoder/vp9_picklpf.h"
51 #include "vp9/encoder/vp9_ratectrl.h"
52 #include "vp9/encoder/vp9_rd.h"
53 #include "vp9/encoder/vp9_resize.h"
54 #include "vp9/encoder/vp9_segmentation.h"
55 #include "vp9/encoder/vp9_skin_detection.h"
56 #include "vp9/encoder/vp9_speed_features.h"
57 #include "vp9/encoder/vp9_svc_layercontext.h"
58 #include "vp9/encoder/vp9_temporal_filter.h"
59 
60 #define AM_SEGMENT_ID_INACTIVE 7
61 #define AM_SEGMENT_ID_ACTIVE 0
62 
63 #define SHARP_FILTER_QTHRESH 0          /* Q threshold for 8-tap sharp filter */
64 
65 #define ALTREF_HIGH_PRECISION_MV 1      // Whether to use high precision mv
66                                          //  for altref computation.
67 #define HIGH_PRECISION_MV_QTHRESH 200   // Q threshold for high precision
68                                          // mv. Choose a very high value for
69                                          // now so that HIGH_PRECISION is always
70                                          // chosen.
71 // #define OUTPUT_YUV_REC
72 
73 #ifdef OUTPUT_YUV_DENOISED
74 FILE *yuv_denoised_file = NULL;
75 #endif
76 #ifdef OUTPUT_YUV_SKINMAP
77 FILE *yuv_skinmap_file = NULL;
78 #endif
79 #ifdef OUTPUT_YUV_REC
80 FILE *yuv_rec_file;
81 #endif
82 
83 #if 0
84 FILE *framepsnr;
85 FILE *kf_list;
86 FILE *keyfile;
87 #endif
88 
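// Convert a VPX_SCALING mode into a frame scaling ratio, returned as
// numerator (*hr) over denominator (*hs); e.g. FOURFIVE yields 4/5, i.e.
// the dimension is scaled to four fifths of its original size.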
89 static INLINE void Scale2Ratio(VPX_SCALING mode, int *hr, int *hs) {
90   switch (mode) {
91     case NORMAL:
92       *hr = 1;
93       *hs = 1;
94       break;
95     case FOURFIVE:
96       *hr = 4;
97       *hs = 5;
98       break;
99     case THREEFIVE:
100       *hr = 3;
101       *hs = 5;
102       break;
103     case ONETWO:
104       *hr = 1;
105       *hs = 2;
106       break;
107     default:
108       *hr = 1;
109       *hs = 1;
110       assert(0);
111       break;
112   }
113 }
114 
115 // Mark all inactive blocks as active. Other segmentation features may be set
116 // so memset cannot be used, instead only inactive blocks should be reset.
117 static void suppress_active_map(VP9_COMP *cpi) {
118   unsigned char *const seg_map = cpi->segmentation_map;
119   int i;
120   if (cpi->active_map.enabled || cpi->active_map.update)
121     for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
122       if (seg_map[i] == AM_SEGMENT_ID_INACTIVE)
123         seg_map[i] = AM_SEGMENT_ID_ACTIVE;
124 }
125 
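// Translate the application-supplied active map into segmentation state:
// when the map is enabled, inactive blocks get a segment with SEG_LVL_SKIP
// and a -MAX_LOOP_FILTER loop-filter delta; on intra-only frames the map is
// disabled and the features it previously set are cleared.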
126 static void apply_active_map(VP9_COMP *cpi) {
127   struct segmentation *const seg = &cpi->common.seg;
128   unsigned char *const seg_map = cpi->segmentation_map;
129   const unsigned char *const active_map = cpi->active_map.map;
130   int i;
131 
132   assert(AM_SEGMENT_ID_ACTIVE == CR_SEGMENT_ID_BASE);
133 
134   if (frame_is_intra_only(&cpi->common)) {
135     cpi->active_map.enabled = 0;
136     cpi->active_map.update = 1;
137   }
138 
139   if (cpi->active_map.update) {
140     if (cpi->active_map.enabled) {
141       for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
142         if (seg_map[i] == AM_SEGMENT_ID_ACTIVE) seg_map[i] = active_map[i];
143       vp9_enable_segmentation(seg);
144       vp9_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
145       vp9_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF);
146       // Setting the data to -MAX_LOOP_FILTER will result in the computed loop
147       // filter level being zero regardless of the value of seg->abs_delta.
148       vp9_set_segdata(seg, AM_SEGMENT_ID_INACTIVE,
149                       SEG_LVL_ALT_LF, -MAX_LOOP_FILTER);
150     } else {
151       vp9_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
152       vp9_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF);
153       if (seg->enabled) {
154         seg->update_data = 1;
155         seg->update_map = 1;
156       }
157     }
158     cpi->active_map.update = 0;
159   }
160 }
161 
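// Install a new active map supplied at 16x16 (macroblock) granularity and
// expand it to the encoder's 8x8 mode-info grid. Returns 0 on success, or
// -1 if the supplied dimensions do not match the coded frame size. This is
// typically reached from the application via the VP8E_SET_ACTIVEMAP control.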
162 int vp9_set_active_map(VP9_COMP* cpi,
163                        unsigned char* new_map_16x16,
164                        int rows,
165                        int cols) {
166   if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
167     unsigned char *const active_map_8x8 = cpi->active_map.map;
168     const int mi_rows = cpi->common.mi_rows;
169     const int mi_cols = cpi->common.mi_cols;
170     cpi->active_map.update = 1;
171     if (new_map_16x16) {
172       int r, c;
173       for (r = 0; r < mi_rows; ++r) {
174         for (c = 0; c < mi_cols; ++c) {
175           active_map_8x8[r * mi_cols + c] =
176               new_map_16x16[(r >> 1) * cols + (c >> 1)]
177                   ? AM_SEGMENT_ID_ACTIVE
178                   : AM_SEGMENT_ID_INACTIVE;
179         }
180       }
181       cpi->active_map.enabled = 1;
182     } else {
183       cpi->active_map.enabled = 0;
184     }
185     return 0;
186   } else {
187     return -1;
188   }
189 }
190 
191 int vp9_get_active_map(VP9_COMP* cpi,
192                        unsigned char* new_map_16x16,
193                        int rows,
194                        int cols) {
195   if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols &&
196       new_map_16x16) {
197     unsigned char* const seg_map_8x8 = cpi->segmentation_map;
198     const int mi_rows = cpi->common.mi_rows;
199     const int mi_cols = cpi->common.mi_cols;
200     memset(new_map_16x16, !cpi->active_map.enabled, rows * cols);
201     if (cpi->active_map.enabled) {
202       int r, c;
203       for (r = 0; r < mi_rows; ++r) {
204         for (c = 0; c < mi_cols; ++c) {
205           // Cyclic refresh segments are considered active despite not having
206           // AM_SEGMENT_ID_ACTIVE
207           new_map_16x16[(r >> 1) * cols + (c >> 1)] |=
208               seg_map_8x8[r * mi_cols + c] != AM_SEGMENT_ID_INACTIVE;
209         }
210       }
211     }
212     return 0;
213   } else {
214     return -1;
215   }
216 }
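// A rough usage sketch from the application side (assumes the standard
// libvpx encoder API; map_buf, mb_rows, mb_cols and codec are placeholder
// names, not part of this file):
//
//   unsigned char map_buf[mb_rows * mb_cols];   /* 1 = active, 0 = inactive */
//   vpx_active_map_t map = { map_buf, mb_rows, mb_cols };
//   vpx_codec_control(&codec, VP8E_SET_ACTIVEMAP, &map);
//
// The control handler forwards the map to vp9_set_active_map() above, and
// VP9E_GET_ACTIVEMAP (where available) reads it back via vp9_get_active_map().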
217 
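// Switch the motion-vector cost tables between regular (1/4-pel) and
// high-precision (1/8-pel) accuracy to match allow_high_precision_mv.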
218 void vp9_set_high_precision_mv(VP9_COMP *cpi, int allow_high_precision_mv) {
219   MACROBLOCK *const mb = &cpi->td.mb;
220   cpi->common.allow_high_precision_mv = allow_high_precision_mv;
221   if (cpi->common.allow_high_precision_mv) {
222     mb->mvcost = mb->nmvcost_hp;
223     mb->mvsadcost = mb->nmvsadcost_hp;
224   } else {
225     mb->mvcost = mb->nmvcost;
226     mb->mvsadcost = mb->nmvsadcost;
227   }
228 }
229 
230 static void setup_frame(VP9_COMP *cpi) {
231   VP9_COMMON *const cm = &cpi->common;
232   // Set up entropy context depending on frame type. The decoder mandates
233   // the use of the default context, index 0, for keyframes and inter
234   // frames where the error_resilient_mode or intra_only flag is set. For
235   // other inter-frames the encoder currently uses only two contexts;
236   // context 1 for ALTREF frames and context 0 for the others.
237   if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
238     vp9_setup_past_independence(cm);
239   } else {
240     if (!cpi->use_svc)
241       cm->frame_context_idx = cpi->refresh_alt_ref_frame;
242   }
243 
244   if (cm->frame_type == KEY_FRAME) {
245     if (!is_two_pass_svc(cpi))
246       cpi->refresh_golden_frame = 1;
247     cpi->refresh_alt_ref_frame = 1;
248     vp9_zero(cpi->interp_filter_selected);
249   } else {
250     *cm->fc = cm->frame_contexts[cm->frame_context_idx];
251     vp9_zero(cpi->interp_filter_selected[0]);
252   }
253 }
254 
255 static void vp9_enc_setup_mi(VP9_COMMON *cm) {
256   int i;
257   cm->mi = cm->mip + cm->mi_stride + 1;
258   memset(cm->mip, 0, cm->mi_stride * (cm->mi_rows + 1) * sizeof(*cm->mip));
259   cm->prev_mi = cm->prev_mip + cm->mi_stride + 1;
260   // Clear top border row
261   memset(cm->prev_mip, 0, sizeof(*cm->prev_mip) * cm->mi_stride);
262   // Clear left border column
263   for (i = 1; i < cm->mi_rows + 1; ++i)
264     memset(&cm->prev_mip[i * cm->mi_stride], 0, sizeof(*cm->prev_mip));
265 
266   cm->mi_grid_visible = cm->mi_grid_base + cm->mi_stride + 1;
267   cm->prev_mi_grid_visible = cm->prev_mi_grid_base + cm->mi_stride + 1;
268 
269   memset(cm->mi_grid_base, 0,
270          cm->mi_stride * (cm->mi_rows + 1) * sizeof(*cm->mi_grid_base));
271 }
272 
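// Allocate the mode-info arrays and pointer grids for both the current and
// previous frame. Returns 0 on success, 1 on allocation failure.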
273 static int vp9_enc_alloc_mi(VP9_COMMON *cm, int mi_size) {
274   cm->mip = vpx_calloc(mi_size, sizeof(*cm->mip));
275   if (!cm->mip)
276     return 1;
277   cm->prev_mip = vpx_calloc(mi_size, sizeof(*cm->prev_mip));
278   if (!cm->prev_mip)
279     return 1;
280   cm->mi_alloc_size = mi_size;
281 
282   cm->mi_grid_base = (MODE_INFO **)vpx_calloc(mi_size, sizeof(MODE_INFO*));
283   if (!cm->mi_grid_base)
284     return 1;
285   cm->prev_mi_grid_base = (MODE_INFO **)vpx_calloc(mi_size, sizeof(MODE_INFO*));
286   if (!cm->prev_mi_grid_base)
287     return 1;
288 
289   return 0;
290 }
291 
292 static void vp9_enc_free_mi(VP9_COMMON *cm) {
293   vpx_free(cm->mip);
294   cm->mip = NULL;
295   vpx_free(cm->prev_mip);
296   cm->prev_mip = NULL;
297   vpx_free(cm->mi_grid_base);
298   cm->mi_grid_base = NULL;
299   vpx_free(cm->prev_mi_grid_base);
300   cm->prev_mi_grid_base = NULL;
301 }
302 
303 static void vp9_swap_mi_and_prev_mi(VP9_COMMON *cm) {
304   // Current mip will be the prev_mip for the next frame.
305   MODE_INFO **temp_base = cm->prev_mi_grid_base;
306   MODE_INFO *temp = cm->prev_mip;
307   cm->prev_mip = cm->mip;
308   cm->mip = temp;
309 
310   // Update the upper left visible macroblock ptrs.
311   cm->mi = cm->mip + cm->mi_stride + 1;
312   cm->prev_mi = cm->prev_mip + cm->mi_stride + 1;
313 
314   cm->prev_mi_grid_base = cm->mi_grid_base;
315   cm->mi_grid_base = temp_base;
316   cm->mi_grid_visible = cm->mi_grid_base + cm->mi_stride + 1;
317   cm->prev_mi_grid_visible = cm->prev_mi_grid_base + cm->mi_stride + 1;
318 }
319 
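// One-time global initialization: set up the run-time CPU dispatch tables
// and the various static lookup tables. Subsequent calls are no-ops.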
320 void vp9_initialize_enc(void) {
321   static volatile int init_done = 0;
322 
323   if (!init_done) {
324     vp9_rtcd();
325     vpx_dsp_rtcd();
326     vpx_scale_rtcd();
327     vp9_init_intra_predictors();
328     vp9_init_me_luts();
329     vp9_rc_init_minq_luts();
330     vp9_entropy_mv_init();
331     vp9_temporal_filter_init();
332     init_done = 1;
333   }
334 }
335 
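// Free everything owned by the compressor instance that is allocated on the
// heap: segmentation and active maps, MV cost tables, cyclic-refresh state,
// frame buffers, the lookahead queue and per-layer SVC state.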
336 static void dealloc_compressor_data(VP9_COMP *cpi) {
337   VP9_COMMON *const cm = &cpi->common;
338   int i;
339 
340   vpx_free(cpi->mbmi_ext_base);
341   cpi->mbmi_ext_base = NULL;
342 
343   vpx_free(cpi->tile_data);
344   cpi->tile_data = NULL;
345 
346   // Delete segmentation map
347   vpx_free(cpi->segmentation_map);
348   cpi->segmentation_map = NULL;
349   vpx_free(cpi->coding_context.last_frame_seg_map_copy);
350   cpi->coding_context.last_frame_seg_map_copy = NULL;
351 
352   vpx_free(cpi->nmvcosts[0]);
353   vpx_free(cpi->nmvcosts[1]);
354   cpi->nmvcosts[0] = NULL;
355   cpi->nmvcosts[1] = NULL;
356 
357   vpx_free(cpi->nmvcosts_hp[0]);
358   vpx_free(cpi->nmvcosts_hp[1]);
359   cpi->nmvcosts_hp[0] = NULL;
360   cpi->nmvcosts_hp[1] = NULL;
361 
362   vpx_free(cpi->nmvsadcosts[0]);
363   vpx_free(cpi->nmvsadcosts[1]);
364   cpi->nmvsadcosts[0] = NULL;
365   cpi->nmvsadcosts[1] = NULL;
366 
367   vpx_free(cpi->nmvsadcosts_hp[0]);
368   vpx_free(cpi->nmvsadcosts_hp[1]);
369   cpi->nmvsadcosts_hp[0] = NULL;
370   cpi->nmvsadcosts_hp[1] = NULL;
371 
372   vp9_cyclic_refresh_free(cpi->cyclic_refresh);
373   cpi->cyclic_refresh = NULL;
374 
375   vpx_free(cpi->active_map.map);
376   cpi->active_map.map = NULL;
377 
378   vp9_free_ref_frame_buffers(cm->buffer_pool);
379 #if CONFIG_VP9_POSTPROC
380   vp9_free_postproc_buffers(cm);
381 #endif
382   vp9_free_context_buffers(cm);
383 
384   vpx_free_frame_buffer(&cpi->last_frame_uf);
385   vpx_free_frame_buffer(&cpi->scaled_source);
386   vpx_free_frame_buffer(&cpi->scaled_last_source);
387   vpx_free_frame_buffer(&cpi->alt_ref_buffer);
388   vp9_lookahead_destroy(cpi->lookahead);
389 
390   vpx_free(cpi->tile_tok[0][0]);
391   cpi->tile_tok[0][0] = 0;
392 
393   vp9_free_pc_tree(&cpi->td);
394 
395   for (i = 0; i < cpi->svc.number_spatial_layers; ++i) {
396     LAYER_CONTEXT *const lc = &cpi->svc.layer_context[i];
397     vpx_free(lc->rc_twopass_stats_in.buf);
398     lc->rc_twopass_stats_in.buf = NULL;
399     lc->rc_twopass_stats_in.sz = 0;
400   }
401 
402   if (cpi->source_diff_var != NULL) {
403     vpx_free(cpi->source_diff_var);
404     cpi->source_diff_var = NULL;
405   }
406 
407   for (i = 0; i < MAX_LAG_BUFFERS; ++i) {
408     vpx_free_frame_buffer(&cpi->svc.scaled_frames[i]);
409   }
410   memset(&cpi->svc.scaled_frames[0], 0,
411          MAX_LAG_BUFFERS * sizeof(cpi->svc.scaled_frames[0]));
412 
413   vpx_free_frame_buffer(&cpi->svc.empty_frame.img);
414   memset(&cpi->svc.empty_frame, 0, sizeof(cpi->svc.empty_frame));
415 
416   vp9_free_svc_cyclic_refresh(cpi);
417 }
418 
419 static void save_coding_context(VP9_COMP *cpi) {
420   CODING_CONTEXT *const cc = &cpi->coding_context;
421   VP9_COMMON *cm = &cpi->common;
422 
423   // Stores a snapshot of key state variables which can subsequently be
424   // restored with a call to vp9_restore_coding_context. These functions are
425   // intended for use in a re-code loop in vp9_compress_frame where the
426   // quantizer value is adjusted between loop iterations.
427   vp9_copy(cc->nmvjointcost,  cpi->td.mb.nmvjointcost);
428 
429   memcpy(cc->nmvcosts[0], cpi->nmvcosts[0],
430          MV_VALS * sizeof(*cpi->nmvcosts[0]));
431   memcpy(cc->nmvcosts[1], cpi->nmvcosts[1],
432          MV_VALS * sizeof(*cpi->nmvcosts[1]));
433   memcpy(cc->nmvcosts_hp[0], cpi->nmvcosts_hp[0],
434          MV_VALS * sizeof(*cpi->nmvcosts_hp[0]));
435   memcpy(cc->nmvcosts_hp[1], cpi->nmvcosts_hp[1],
436          MV_VALS * sizeof(*cpi->nmvcosts_hp[1]));
437 
438   vp9_copy(cc->segment_pred_probs, cm->seg.pred_probs);
439 
440   memcpy(cpi->coding_context.last_frame_seg_map_copy,
441          cm->last_frame_seg_map, (cm->mi_rows * cm->mi_cols));
442 
443   vp9_copy(cc->last_ref_lf_deltas, cm->lf.last_ref_deltas);
444   vp9_copy(cc->last_mode_lf_deltas, cm->lf.last_mode_deltas);
445 
446   cc->fc = *cm->fc;
447 }
448 
449 static void restore_coding_context(VP9_COMP *cpi) {
450   CODING_CONTEXT *const cc = &cpi->coding_context;
451   VP9_COMMON *cm = &cpi->common;
452 
453   // Restore key state variables to the snapshot state stored in the
454   // previous call to vp9_save_coding_context.
455   vp9_copy(cpi->td.mb.nmvjointcost, cc->nmvjointcost);
456 
457   memcpy(cpi->nmvcosts[0], cc->nmvcosts[0], MV_VALS * sizeof(*cc->nmvcosts[0]));
458   memcpy(cpi->nmvcosts[1], cc->nmvcosts[1], MV_VALS * sizeof(*cc->nmvcosts[1]));
459   memcpy(cpi->nmvcosts_hp[0], cc->nmvcosts_hp[0],
460          MV_VALS * sizeof(*cc->nmvcosts_hp[0]));
461   memcpy(cpi->nmvcosts_hp[1], cc->nmvcosts_hp[1],
462          MV_VALS * sizeof(*cc->nmvcosts_hp[1]));
463 
464   vp9_copy(cm->seg.pred_probs, cc->segment_pred_probs);
465 
466   memcpy(cm->last_frame_seg_map,
467          cpi->coding_context.last_frame_seg_map_copy,
468          (cm->mi_rows * cm->mi_cols));
469 
470   vp9_copy(cm->lf.last_ref_deltas, cc->last_ref_lf_deltas);
471   vp9_copy(cm->lf.last_mode_deltas, cc->last_mode_lf_deltas);
472 
473   *cm->fc = cc->fc;
474 }
475 
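// Configure the "static" segmentation features used around ARF groups:
// reset and disable segmentation on key frames, set up Q / loop-filter /
// reference-frame deltas when an ARF is being coded or is active, and leave
// the map untouched on all other frames.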
476 static void configure_static_seg_features(VP9_COMP *cpi) {
477   VP9_COMMON *const cm = &cpi->common;
478   const RATE_CONTROL *const rc = &cpi->rc;
479   struct segmentation *const seg = &cm->seg;
480 
481   int high_q = (int)(rc->avg_q > 48.0);
482   int qi_delta;
483 
484   // Disable and clear down for KF
485   if (cm->frame_type == KEY_FRAME) {
486     // Clear down the global segmentation map
487     memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
488     seg->update_map = 0;
489     seg->update_data = 0;
490     cpi->static_mb_pct = 0;
491 
492     // Disable segmentation
493     vp9_disable_segmentation(seg);
494 
495     // Clear down the segment features.
496     vp9_clearall_segfeatures(seg);
497   } else if (cpi->refresh_alt_ref_frame) {
498     // If this is an alt ref frame
499     // Clear down the global segmentation map
500     memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
501     seg->update_map = 0;
502     seg->update_data = 0;
503     cpi->static_mb_pct = 0;
504 
505     // Disable segmentation and individual segment features by default
506     vp9_disable_segmentation(seg);
507     vp9_clearall_segfeatures(seg);
508 
509     // Scan frames from current to arf frame.
510     // This function re-enables segmentation if appropriate.
511     vp9_update_mbgraph_stats(cpi);
512 
513     // If segmentation was enabled set those features needed for the
514     // arf itself.
515     if (seg->enabled) {
516       seg->update_map = 1;
517       seg->update_data = 1;
518 
519       qi_delta = vp9_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875,
520                                     cm->bit_depth);
521       vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
522       vp9_set_segdata(seg, 1, SEG_LVL_ALT_LF, -2);
523 
524       vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
525       vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_LF);
526 
527       // Where relevant assume segment data is delta data
528       seg->abs_delta = SEGMENT_DELTADATA;
529     }
530   } else if (seg->enabled) {
531     // All other frames if segmentation has been enabled
532 
533     // First normal frame in a valid gf or alt ref group
534     if (rc->frames_since_golden == 0) {
535       // Set up segment features for normal frames in an arf group
536       if (rc->source_alt_ref_active) {
537         seg->update_map = 0;
538         seg->update_data = 1;
539         seg->abs_delta = SEGMENT_DELTADATA;
540 
541         qi_delta = vp9_compute_qdelta(rc, rc->avg_q, rc->avg_q * 1.125,
542                                       cm->bit_depth);
543         vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta + 2);
544         vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
545 
546         vp9_set_segdata(seg, 1, SEG_LVL_ALT_LF, -2);
547         vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_LF);
548 
549         // Segment coding disabled for compred testing
550         if (high_q || (cpi->static_mb_pct == 100)) {
551           vp9_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
552           vp9_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
553           vp9_enable_segfeature(seg, 1, SEG_LVL_SKIP);
554         }
555       } else {
556         // Disable segmentation and clear down features if alt ref
557         // is not active for this group
558 
559         vp9_disable_segmentation(seg);
560 
561         memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
562 
563         seg->update_map = 0;
564         seg->update_data = 0;
565 
566         vp9_clearall_segfeatures(seg);
567       }
568     } else if (rc->is_src_frame_alt_ref) {
569       // Special case where we are coding over the top of a previous
570       // alt ref frame.
571       // Segment coding disabled for compred testing
572 
573       // Enable ref frame features for segment 0 as well
574       vp9_enable_segfeature(seg, 0, SEG_LVL_REF_FRAME);
575       vp9_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
576 
577       // All mbs should use ALTREF_FRAME
578       vp9_clear_segdata(seg, 0, SEG_LVL_REF_FRAME);
579       vp9_set_segdata(seg, 0, SEG_LVL_REF_FRAME, ALTREF_FRAME);
580       vp9_clear_segdata(seg, 1, SEG_LVL_REF_FRAME);
581       vp9_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
582 
583       // Skip all MBs if high Q (0,0 mv and skip coeffs)
584       if (high_q) {
585         vp9_enable_segfeature(seg, 0, SEG_LVL_SKIP);
586         vp9_enable_segfeature(seg, 1, SEG_LVL_SKIP);
587       }
588       // Enable data update
589       seg->update_data = 1;
590     } else {
591       // All other frames.
592 
593       // No updates.. leave things as they are.
594       seg->update_map = 0;
595       seg->update_data = 0;
596     }
597   }
598 }
599 
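// Copy the per-8x8 segment ids of the frame just coded into
// cm->last_frame_seg_map so the next frame can use temporal prediction of
// segment ids.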
600 static void update_reference_segmentation_map(VP9_COMP *cpi) {
601   VP9_COMMON *const cm = &cpi->common;
602   MODE_INFO **mi_8x8_ptr = cm->mi_grid_visible;
603   uint8_t *cache_ptr = cm->last_frame_seg_map;
604   int row, col;
605 
606   for (row = 0; row < cm->mi_rows; row++) {
607     MODE_INFO **mi_8x8 = mi_8x8_ptr;
608     uint8_t *cache = cache_ptr;
609     for (col = 0; col < cm->mi_cols; col++, mi_8x8++, cache++)
610       cache[0] = mi_8x8[0]->mbmi.segment_id;
611     mi_8x8_ptr += cm->mi_stride;
612     cache_ptr += cm->mi_cols;
613   }
614 }
615 
616 static void alloc_raw_frame_buffers(VP9_COMP *cpi) {
617   VP9_COMMON *cm = &cpi->common;
618   const VP9EncoderConfig *oxcf = &cpi->oxcf;
619 
620   if (!cpi->lookahead)
621     cpi->lookahead = vp9_lookahead_init(oxcf->width, oxcf->height,
622                                         cm->subsampling_x, cm->subsampling_y,
623 #if CONFIG_VP9_HIGHBITDEPTH
624                                       cm->use_highbitdepth,
625 #endif
626                                       oxcf->lag_in_frames);
627   if (!cpi->lookahead)
628     vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
629                        "Failed to allocate lag buffers");
630 
631   // TODO(agrange) Check if ARF is enabled and skip allocation if not.
632   if (vpx_realloc_frame_buffer(&cpi->alt_ref_buffer,
633                                oxcf->width, oxcf->height,
634                                cm->subsampling_x, cm->subsampling_y,
635 #if CONFIG_VP9_HIGHBITDEPTH
636                                cm->use_highbitdepth,
637 #endif
638                                VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
639                                NULL, NULL, NULL))
640     vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
641                        "Failed to allocate altref buffer");
642 }
643 
644 static void alloc_util_frame_buffers(VP9_COMP *cpi) {
645   VP9_COMMON *const cm = &cpi->common;
646   if (vpx_realloc_frame_buffer(&cpi->last_frame_uf,
647                                cm->width, cm->height,
648                                cm->subsampling_x, cm->subsampling_y,
649 #if CONFIG_VP9_HIGHBITDEPTH
650                                cm->use_highbitdepth,
651 #endif
652                                VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
653                                NULL, NULL, NULL))
654     vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
655                        "Failed to allocate last frame buffer");
656 
657   if (vpx_realloc_frame_buffer(&cpi->scaled_source,
658                                cm->width, cm->height,
659                                cm->subsampling_x, cm->subsampling_y,
660 #if CONFIG_VP9_HIGHBITDEPTH
661                                cm->use_highbitdepth,
662 #endif
663                                VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
664                                NULL, NULL, NULL))
665     vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
666                        "Failed to allocate scaled source buffer");
667 
668   if (vpx_realloc_frame_buffer(&cpi->scaled_last_source,
669                                cm->width, cm->height,
670                                cm->subsampling_x, cm->subsampling_y,
671 #if CONFIG_VP9_HIGHBITDEPTH
672                                cm->use_highbitdepth,
673 #endif
674                                VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
675                                NULL, NULL, NULL))
676     vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
677                        "Failed to allocate scaled last source buffer");
678 }
679 
680 
681 static int alloc_context_buffers_ext(VP9_COMP *cpi) {
682   VP9_COMMON *cm = &cpi->common;
683   int mi_size = cm->mi_cols * cm->mi_rows;
684 
685   cpi->mbmi_ext_base = vpx_calloc(mi_size, sizeof(*cpi->mbmi_ext_base));
686   if (!cpi->mbmi_ext_base)
687     return 1;
688 
689   return 0;
690 }
691 
692 static void alloc_compressor_data(VP9_COMP *cpi) {
693   VP9_COMMON *cm = &cpi->common;
694 
695   vp9_alloc_context_buffers(cm, cm->width, cm->height);
696 
697   alloc_context_buffers_ext(cpi);
698 
699   vpx_free(cpi->tile_tok[0][0]);
700 
701   {
702     unsigned int tokens = get_token_alloc(cm->mb_rows, cm->mb_cols);
703     CHECK_MEM_ERROR(cm, cpi->tile_tok[0][0],
704         vpx_calloc(tokens, sizeof(*cpi->tile_tok[0][0])));
705   }
706 
707   vp9_setup_pc_tree(&cpi->common, &cpi->td);
708 }
709 
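// Update the encoder frame rate; implausible values (below 0.1 fps) fall
// back to 30 fps, after which the per-frame rate-control bandwidth is
// recomputed.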
710 void vp9_new_framerate(VP9_COMP *cpi, double framerate) {
711   cpi->framerate = framerate < 0.1 ? 30 : framerate;
712   vp9_rc_update_framerate(cpi);
713 }
714 
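// Pick log2 tile rows/columns from the configuration, clamped to the range
// the frame width allows. In two-pass SVC, encoding an empty frame or using
// multiple spatial layers forces a single tile.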
715 static void set_tile_limits(VP9_COMP *cpi) {
716   VP9_COMMON *const cm = &cpi->common;
717 
718   int min_log2_tile_cols, max_log2_tile_cols;
719   vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
720 
721   if (is_two_pass_svc(cpi) &&
722       (cpi->svc.encode_empty_frame_state == ENCODING ||
723       cpi->svc.number_spatial_layers > 1)) {
724     cm->log2_tile_cols = 0;
725     cm->log2_tile_rows = 0;
726   } else {
727     cm->log2_tile_cols = clamp(cpi->oxcf.tile_columns,
728                                min_log2_tile_cols, max_log2_tile_cols);
729     cm->log2_tile_rows = cpi->oxcf.tile_rows;
730   }
731 }
732 
733 static void update_frame_size(VP9_COMP *cpi) {
734   VP9_COMMON *const cm = &cpi->common;
735   MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
736 
737   vp9_set_mb_mi(cm, cm->width, cm->height);
738   vp9_init_context_buffers(cm);
739   vp9_init_macroblockd(cm, xd, NULL);
740   cpi->td.mb.mbmi_ext_base = cpi->mbmi_ext_base;
741   memset(cpi->mbmi_ext_base, 0,
742          cm->mi_rows * cm->mi_cols * sizeof(*cpi->mbmi_ext_base));
743 
744   set_tile_limits(cpi);
745 
746   if (is_two_pass_svc(cpi)) {
747     if (vpx_realloc_frame_buffer(&cpi->alt_ref_buffer,
748                                  cm->width, cm->height,
749                                  cm->subsampling_x, cm->subsampling_y,
750 #if CONFIG_VP9_HIGHBITDEPTH
751                                  cm->use_highbitdepth,
752 #endif
753                                  VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
754                                  NULL, NULL, NULL))
755       vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
756                          "Failed to reallocate alt_ref_buffer");
757   }
758 }
759 
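// Default reference buffer assignment: slot 0 holds LAST, slot 1 GOLDEN and
// slot 2 ALTREF.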
760 static void init_buffer_indices(VP9_COMP *cpi) {
761   cpi->lst_fb_idx = 0;
762   cpi->gld_fb_idx = 1;
763   cpi->alt_fb_idx = 2;
764 }
765 
766 static void init_config(struct VP9_COMP *cpi, VP9EncoderConfig *oxcf) {
767   VP9_COMMON *const cm = &cpi->common;
768 
769   cpi->oxcf = *oxcf;
770   cpi->framerate = oxcf->init_framerate;
771 
772   cm->profile = oxcf->profile;
773   cm->bit_depth = oxcf->bit_depth;
774 #if CONFIG_VP9_HIGHBITDEPTH
775   cm->use_highbitdepth = oxcf->use_highbitdepth;
776 #endif
777   cm->color_space = oxcf->color_space;
778   cm->color_range = oxcf->color_range;
779 
780   cm->width = oxcf->width;
781   cm->height = oxcf->height;
782   alloc_compressor_data(cpi);
783 
784   cpi->svc.temporal_layering_mode = oxcf->temporal_layering_mode;
785 
786   // Single thread case: use counts in common.
787   cpi->td.counts = &cm->counts;
788 
789   // Spatial scalability.
790   cpi->svc.number_spatial_layers = oxcf->ss_number_layers;
791   // Temporal scalability.
792   cpi->svc.number_temporal_layers = oxcf->ts_number_layers;
793 
794   if ((cpi->svc.number_temporal_layers > 1 && cpi->oxcf.rc_mode == VPX_CBR) ||
795       ((cpi->svc.number_temporal_layers > 1 ||
796         cpi->svc.number_spatial_layers > 1) &&
797        cpi->oxcf.pass != 1)) {
798     vp9_init_layer_context(cpi);
799   }
800 
801   // change includes all joint functionality
802   vp9_change_config(cpi, oxcf);
803 
804   cpi->static_mb_pct = 0;
805   cpi->ref_frame_flags = 0;
806 
807   init_buffer_indices(cpi);
808 }
809 
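// Convert the rate-control buffer levels from milliseconds of data into
// bits at the target bandwidth; a zero optimal/maximum setting defaults to
// one eighth of a second of data. Worked example with hypothetical numbers:
// target_bandwidth = 800000 bit/s and optimal_buffer_level_ms = 5000 give
// 5000 * 800000 / 1000 = 4,000,000 bits.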
810 static void set_rc_buffer_sizes(RATE_CONTROL *rc,
811                                 const VP9EncoderConfig *oxcf) {
812   const int64_t bandwidth = oxcf->target_bandwidth;
813   const int64_t starting = oxcf->starting_buffer_level_ms;
814   const int64_t optimal = oxcf->optimal_buffer_level_ms;
815   const int64_t maximum = oxcf->maximum_buffer_size_ms;
816 
817   rc->starting_buffer_level = starting * bandwidth / 1000;
818   rc->optimal_buffer_level = (optimal == 0) ? bandwidth / 8
819                                             : optimal * bandwidth / 1000;
820   rc->maximum_buffer_size = (maximum == 0) ? bandwidth / 8
821                                            : maximum * bandwidth / 1000;
822 }
823 
824 #if CONFIG_VP9_HIGHBITDEPTH
825 #define HIGHBD_BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX3F, SDX8F, SDX4DF) \
826     cpi->fn_ptr[BT].sdf = SDF; \
827     cpi->fn_ptr[BT].sdaf = SDAF; \
828     cpi->fn_ptr[BT].vf = VF; \
829     cpi->fn_ptr[BT].svf = SVF; \
830     cpi->fn_ptr[BT].svaf = SVAF; \
831     cpi->fn_ptr[BT].sdx3f = SDX3F; \
832     cpi->fn_ptr[BT].sdx8f = SDX8F; \
833     cpi->fn_ptr[BT].sdx4df = SDX4DF;
834 
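// The *_bits8/_bits10/_bits12 wrappers generated below scale high-bitdepth
// SAD values back towards an 8-bit range (>> 2 for 10-bit, >> 4 for 12-bit
// input) so the magnitudes remain comparable to the 8-bit case.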
835 #define MAKE_BFP_SAD_WRAPPER(fnname) \
836 static unsigned int fnname##_bits8(const uint8_t *src_ptr, \
837                                    int source_stride, \
838                                    const uint8_t *ref_ptr, \
839                                    int ref_stride) {  \
840   return fnname(src_ptr, source_stride, ref_ptr, ref_stride); \
841 } \
842 static unsigned int fnname##_bits10(const uint8_t *src_ptr, \
843                                     int source_stride, \
844                                     const uint8_t *ref_ptr, \
845                                     int ref_stride) {  \
846   return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 2; \
847 } \
848 static unsigned int fnname##_bits12(const uint8_t *src_ptr, \
849                                     int source_stride, \
850                                     const uint8_t *ref_ptr, \
851                                     int ref_stride) {  \
852   return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 4; \
853 }
854 
855 #define MAKE_BFP_SADAVG_WRAPPER(fnname) static unsigned int \
856 fnname##_bits8(const uint8_t *src_ptr, \
857                int source_stride, \
858                const uint8_t *ref_ptr, \
859                int ref_stride, \
860                const uint8_t *second_pred) {  \
861   return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred); \
862 } \
863 static unsigned int fnname##_bits10(const uint8_t *src_ptr, \
864                                     int source_stride, \
865                                     const uint8_t *ref_ptr, \
866                                     int ref_stride, \
867                                     const uint8_t *second_pred) {  \
868   return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
869                 second_pred) >> 2; \
870 } \
871 static unsigned int fnname##_bits12(const uint8_t *src_ptr, \
872                                     int source_stride, \
873                                     const uint8_t *ref_ptr, \
874                                     int ref_stride, \
875                                     const uint8_t *second_pred) {  \
876   return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
877                 second_pred) >> 4; \
878 }
879 
880 #define MAKE_BFP_SAD3_WRAPPER(fnname) \
881 static void fnname##_bits8(const uint8_t *src_ptr, \
882                            int source_stride, \
883                            const uint8_t *ref_ptr, \
884                            int  ref_stride, \
885                            unsigned int *sad_array) {  \
886   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
887 } \
888 static void fnname##_bits10(const uint8_t *src_ptr, \
889                             int source_stride, \
890                             const uint8_t *ref_ptr, \
891                             int  ref_stride, \
892                             unsigned int *sad_array) {  \
893   int i; \
894   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
895   for (i = 0; i < 3; i++) \
896     sad_array[i] >>= 2; \
897 } \
898 static void fnname##_bits12(const uint8_t *src_ptr, \
899                             int source_stride, \
900                             const uint8_t *ref_ptr, \
901                             int  ref_stride, \
902                             unsigned int *sad_array) {  \
903   int i; \
904   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
905   for (i = 0; i < 3; i++) \
906     sad_array[i] >>= 4; \
907 }
908 
909 #define MAKE_BFP_SAD8_WRAPPER(fnname) \
910 static void fnname##_bits8(const uint8_t *src_ptr, \
911                            int source_stride, \
912                            const uint8_t *ref_ptr, \
913                            int  ref_stride, \
914                            unsigned int *sad_array) {  \
915   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
916 } \
917 static void fnname##_bits10(const uint8_t *src_ptr, \
918                             int source_stride, \
919                             const uint8_t *ref_ptr, \
920                             int  ref_stride, \
921                             unsigned int *sad_array) {  \
922   int i; \
923   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
924   for (i = 0; i < 8; i++) \
925     sad_array[i] >>= 2; \
926 } \
927 static void fnname##_bits12(const uint8_t *src_ptr, \
928                             int source_stride, \
929                             const uint8_t *ref_ptr, \
930                             int  ref_stride, \
931                             unsigned int *sad_array) {  \
932   int i; \
933   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
934   for (i = 0; i < 8; i++) \
935     sad_array[i] >>= 4; \
936 }
937 #define MAKE_BFP_SAD4D_WRAPPER(fnname) \
938 static void fnname##_bits8(const uint8_t *src_ptr, \
939                            int source_stride, \
940                            const uint8_t* const ref_ptr[], \
941                            int  ref_stride, \
942                            unsigned int *sad_array) {  \
943   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
944 } \
945 static void fnname##_bits10(const uint8_t *src_ptr, \
946                             int source_stride, \
947                             const uint8_t* const ref_ptr[], \
948                             int  ref_stride, \
949                             unsigned int *sad_array) {  \
950   int i; \
951   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
952   for (i = 0; i < 4; i++) \
953     sad_array[i] >>= 2; \
954 } \
955 static void fnname##_bits12(const uint8_t *src_ptr, \
956                             int source_stride, \
957                             const uint8_t* const ref_ptr[], \
958                             int  ref_stride, \
959                             unsigned int *sad_array) {  \
960   int i; \
961   fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
962   for (i = 0; i < 4; i++) \
963     sad_array[i] >>= 4; \
964 }
965 
966 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad32x16)
967 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad32x16_avg)
968 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad32x16x4d)
969 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad16x32)
970 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad16x32_avg)
971 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad16x32x4d)
972 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad64x32)
973 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad64x32_avg)
974 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad64x32x4d)
975 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad32x64)
976 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad32x64_avg)
977 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad32x64x4d)
978 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad32x32)
979 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad32x32_avg)
980 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad32x32x3)
981 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad32x32x8)
982 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad32x32x4d)
983 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad64x64)
984 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad64x64_avg)
985 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad64x64x3)
986 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad64x64x8)
987 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad64x64x4d)
988 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad16x16)
989 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad16x16_avg)
990 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad16x16x3)
991 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad16x16x8)
992 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad16x16x4d)
993 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad16x8)
994 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad16x8_avg)
995 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad16x8x3)
996 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad16x8x8)
997 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad16x8x4d)
998 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad8x16)
999 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad8x16_avg)
1000 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad8x16x3)
1001 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad8x16x8)
1002 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad8x16x4d)
1003 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad8x8)
1004 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad8x8_avg)
1005 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad8x8x3)
1006 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad8x8x8)
1007 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad8x8x4d)
1008 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad8x4)
1009 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad8x4_avg)
1010 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad8x4x8)
1011 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad8x4x4d)
1012 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad4x8)
1013 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad4x8_avg)
1014 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad4x8x8)
1015 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad4x8x4d)
1016 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad4x4)
1017 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad4x4_avg)
1018 MAKE_BFP_SAD3_WRAPPER(vpx_highbd_sad4x4x3)
1019 MAKE_BFP_SAD8_WRAPPER(vpx_highbd_sad4x4x8)
1020 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad4x4x4d)
1021 
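// Point the per-block-size SAD / variance function pointers at the
// bit-depth-specific implementations. Entries left as NULL (the x3/x8
// multi-SAD variants for some block sizes) have no high-bitdepth
// counterpart.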
1022 static void highbd_set_var_fns(VP9_COMP *const cpi) {
1023   VP9_COMMON *const cm = &cpi->common;
1024   if (cm->use_highbitdepth) {
1025     switch (cm->bit_depth) {
1026       case VPX_BITS_8:
1027         HIGHBD_BFP(BLOCK_32X16,
1028                    vpx_highbd_sad32x16_bits8,
1029                    vpx_highbd_sad32x16_avg_bits8,
1030                    vpx_highbd_8_variance32x16,
1031                    vpx_highbd_8_sub_pixel_variance32x16,
1032                    vpx_highbd_8_sub_pixel_avg_variance32x16,
1033                    NULL,
1034                    NULL,
1035                    vpx_highbd_sad32x16x4d_bits8)
1036 
1037         HIGHBD_BFP(BLOCK_16X32,
1038                    vpx_highbd_sad16x32_bits8,
1039                    vpx_highbd_sad16x32_avg_bits8,
1040                    vpx_highbd_8_variance16x32,
1041                    vpx_highbd_8_sub_pixel_variance16x32,
1042                    vpx_highbd_8_sub_pixel_avg_variance16x32,
1043                    NULL,
1044                    NULL,
1045                    vpx_highbd_sad16x32x4d_bits8)
1046 
1047         HIGHBD_BFP(BLOCK_64X32,
1048                    vpx_highbd_sad64x32_bits8,
1049                    vpx_highbd_sad64x32_avg_bits8,
1050                    vpx_highbd_8_variance64x32,
1051                    vpx_highbd_8_sub_pixel_variance64x32,
1052                    vpx_highbd_8_sub_pixel_avg_variance64x32,
1053                    NULL,
1054                    NULL,
1055                    vpx_highbd_sad64x32x4d_bits8)
1056 
1057         HIGHBD_BFP(BLOCK_32X64,
1058                    vpx_highbd_sad32x64_bits8,
1059                    vpx_highbd_sad32x64_avg_bits8,
1060                    vpx_highbd_8_variance32x64,
1061                    vpx_highbd_8_sub_pixel_variance32x64,
1062                    vpx_highbd_8_sub_pixel_avg_variance32x64,
1063                    NULL,
1064                    NULL,
1065                    vpx_highbd_sad32x64x4d_bits8)
1066 
1067         HIGHBD_BFP(BLOCK_32X32,
1068                    vpx_highbd_sad32x32_bits8,
1069                    vpx_highbd_sad32x32_avg_bits8,
1070                    vpx_highbd_8_variance32x32,
1071                    vpx_highbd_8_sub_pixel_variance32x32,
1072                    vpx_highbd_8_sub_pixel_avg_variance32x32,
1073                    vpx_highbd_sad32x32x3_bits8,
1074                    vpx_highbd_sad32x32x8_bits8,
1075                    vpx_highbd_sad32x32x4d_bits8)
1076 
1077         HIGHBD_BFP(BLOCK_64X64,
1078                    vpx_highbd_sad64x64_bits8,
1079                    vpx_highbd_sad64x64_avg_bits8,
1080                    vpx_highbd_8_variance64x64,
1081                    vpx_highbd_8_sub_pixel_variance64x64,
1082                    vpx_highbd_8_sub_pixel_avg_variance64x64,
1083                    vpx_highbd_sad64x64x3_bits8,
1084                    vpx_highbd_sad64x64x8_bits8,
1085                    vpx_highbd_sad64x64x4d_bits8)
1086 
1087         HIGHBD_BFP(BLOCK_16X16,
1088                    vpx_highbd_sad16x16_bits8,
1089                    vpx_highbd_sad16x16_avg_bits8,
1090                    vpx_highbd_8_variance16x16,
1091                    vpx_highbd_8_sub_pixel_variance16x16,
1092                    vpx_highbd_8_sub_pixel_avg_variance16x16,
1093                    vpx_highbd_sad16x16x3_bits8,
1094                    vpx_highbd_sad16x16x8_bits8,
1095                    vpx_highbd_sad16x16x4d_bits8)
1096 
1097         HIGHBD_BFP(BLOCK_16X8,
1098                    vpx_highbd_sad16x8_bits8,
1099                    vpx_highbd_sad16x8_avg_bits8,
1100                    vpx_highbd_8_variance16x8,
1101                    vpx_highbd_8_sub_pixel_variance16x8,
1102                    vpx_highbd_8_sub_pixel_avg_variance16x8,
1103                    vpx_highbd_sad16x8x3_bits8,
1104                    vpx_highbd_sad16x8x8_bits8,
1105                    vpx_highbd_sad16x8x4d_bits8)
1106 
1107         HIGHBD_BFP(BLOCK_8X16,
1108                    vpx_highbd_sad8x16_bits8,
1109                    vpx_highbd_sad8x16_avg_bits8,
1110                    vpx_highbd_8_variance8x16,
1111                    vpx_highbd_8_sub_pixel_variance8x16,
1112                    vpx_highbd_8_sub_pixel_avg_variance8x16,
1113                    vpx_highbd_sad8x16x3_bits8,
1114                    vpx_highbd_sad8x16x8_bits8,
1115                    vpx_highbd_sad8x16x4d_bits8)
1116 
1117         HIGHBD_BFP(BLOCK_8X8,
1118                    vpx_highbd_sad8x8_bits8,
1119                    vpx_highbd_sad8x8_avg_bits8,
1120                    vpx_highbd_8_variance8x8,
1121                    vpx_highbd_8_sub_pixel_variance8x8,
1122                    vpx_highbd_8_sub_pixel_avg_variance8x8,
1123                    vpx_highbd_sad8x8x3_bits8,
1124                    vpx_highbd_sad8x8x8_bits8,
1125                    vpx_highbd_sad8x8x4d_bits8)
1126 
1127         HIGHBD_BFP(BLOCK_8X4,
1128                    vpx_highbd_sad8x4_bits8,
1129                    vpx_highbd_sad8x4_avg_bits8,
1130                    vpx_highbd_8_variance8x4,
1131                    vpx_highbd_8_sub_pixel_variance8x4,
1132                    vpx_highbd_8_sub_pixel_avg_variance8x4,
1133                    NULL,
1134                    vpx_highbd_sad8x4x8_bits8,
1135                    vpx_highbd_sad8x4x4d_bits8)
1136 
1137         HIGHBD_BFP(BLOCK_4X8,
1138                    vpx_highbd_sad4x8_bits8,
1139                    vpx_highbd_sad4x8_avg_bits8,
1140                    vpx_highbd_8_variance4x8,
1141                    vpx_highbd_8_sub_pixel_variance4x8,
1142                    vpx_highbd_8_sub_pixel_avg_variance4x8,
1143                    NULL,
1144                    vpx_highbd_sad4x8x8_bits8,
1145                    vpx_highbd_sad4x8x4d_bits8)
1146 
1147         HIGHBD_BFP(BLOCK_4X4,
1148                    vpx_highbd_sad4x4_bits8,
1149                    vpx_highbd_sad4x4_avg_bits8,
1150                    vpx_highbd_8_variance4x4,
1151                    vpx_highbd_8_sub_pixel_variance4x4,
1152                    vpx_highbd_8_sub_pixel_avg_variance4x4,
1153                    vpx_highbd_sad4x4x3_bits8,
1154                    vpx_highbd_sad4x4x8_bits8,
1155                    vpx_highbd_sad4x4x4d_bits8)
1156         break;
1157 
1158       case VPX_BITS_10:
1159         HIGHBD_BFP(BLOCK_32X16,
1160                    vpx_highbd_sad32x16_bits10,
1161                    vpx_highbd_sad32x16_avg_bits10,
1162                    vpx_highbd_10_variance32x16,
1163                    vpx_highbd_10_sub_pixel_variance32x16,
1164                    vpx_highbd_10_sub_pixel_avg_variance32x16,
1165                    NULL,
1166                    NULL,
1167                    vpx_highbd_sad32x16x4d_bits10)
1168 
1169         HIGHBD_BFP(BLOCK_16X32,
1170                    vpx_highbd_sad16x32_bits10,
1171                    vpx_highbd_sad16x32_avg_bits10,
1172                    vpx_highbd_10_variance16x32,
1173                    vpx_highbd_10_sub_pixel_variance16x32,
1174                    vpx_highbd_10_sub_pixel_avg_variance16x32,
1175                    NULL,
1176                    NULL,
1177                    vpx_highbd_sad16x32x4d_bits10)
1178 
1179         HIGHBD_BFP(BLOCK_64X32,
1180                    vpx_highbd_sad64x32_bits10,
1181                    vpx_highbd_sad64x32_avg_bits10,
1182                    vpx_highbd_10_variance64x32,
1183                    vpx_highbd_10_sub_pixel_variance64x32,
1184                    vpx_highbd_10_sub_pixel_avg_variance64x32,
1185                    NULL,
1186                    NULL,
1187                    vpx_highbd_sad64x32x4d_bits10)
1188 
1189         HIGHBD_BFP(BLOCK_32X64,
1190                    vpx_highbd_sad32x64_bits10,
1191                    vpx_highbd_sad32x64_avg_bits10,
1192                    vpx_highbd_10_variance32x64,
1193                    vpx_highbd_10_sub_pixel_variance32x64,
1194                    vpx_highbd_10_sub_pixel_avg_variance32x64,
1195                    NULL,
1196                    NULL,
1197                    vpx_highbd_sad32x64x4d_bits10)
1198 
1199         HIGHBD_BFP(BLOCK_32X32,
1200                    vpx_highbd_sad32x32_bits10,
1201                    vpx_highbd_sad32x32_avg_bits10,
1202                    vpx_highbd_10_variance32x32,
1203                    vpx_highbd_10_sub_pixel_variance32x32,
1204                    vpx_highbd_10_sub_pixel_avg_variance32x32,
1205                    vpx_highbd_sad32x32x3_bits10,
1206                    vpx_highbd_sad32x32x8_bits10,
1207                    vpx_highbd_sad32x32x4d_bits10)
1208 
1209         HIGHBD_BFP(BLOCK_64X64,
1210                    vpx_highbd_sad64x64_bits10,
1211                    vpx_highbd_sad64x64_avg_bits10,
1212                    vpx_highbd_10_variance64x64,
1213                    vpx_highbd_10_sub_pixel_variance64x64,
1214                    vpx_highbd_10_sub_pixel_avg_variance64x64,
1215                    vpx_highbd_sad64x64x3_bits10,
1216                    vpx_highbd_sad64x64x8_bits10,
1217                    vpx_highbd_sad64x64x4d_bits10)
1218 
1219         HIGHBD_BFP(BLOCK_16X16,
1220                    vpx_highbd_sad16x16_bits10,
1221                    vpx_highbd_sad16x16_avg_bits10,
1222                    vpx_highbd_10_variance16x16,
1223                    vpx_highbd_10_sub_pixel_variance16x16,
1224                    vpx_highbd_10_sub_pixel_avg_variance16x16,
1225                    vpx_highbd_sad16x16x3_bits10,
1226                    vpx_highbd_sad16x16x8_bits10,
1227                    vpx_highbd_sad16x16x4d_bits10)
1228 
1229         HIGHBD_BFP(BLOCK_16X8,
1230                    vpx_highbd_sad16x8_bits10,
1231                    vpx_highbd_sad16x8_avg_bits10,
1232                    vpx_highbd_10_variance16x8,
1233                    vpx_highbd_10_sub_pixel_variance16x8,
1234                    vpx_highbd_10_sub_pixel_avg_variance16x8,
1235                    vpx_highbd_sad16x8x3_bits10,
1236                    vpx_highbd_sad16x8x8_bits10,
1237                    vpx_highbd_sad16x8x4d_bits10)
1238 
1239         HIGHBD_BFP(BLOCK_8X16,
1240                    vpx_highbd_sad8x16_bits10,
1241                    vpx_highbd_sad8x16_avg_bits10,
1242                    vpx_highbd_10_variance8x16,
1243                    vpx_highbd_10_sub_pixel_variance8x16,
1244                    vpx_highbd_10_sub_pixel_avg_variance8x16,
1245                    vpx_highbd_sad8x16x3_bits10,
1246                    vpx_highbd_sad8x16x8_bits10,
1247                    vpx_highbd_sad8x16x4d_bits10)
1248 
1249         HIGHBD_BFP(BLOCK_8X8,
1250                    vpx_highbd_sad8x8_bits10,
1251                    vpx_highbd_sad8x8_avg_bits10,
1252                    vpx_highbd_10_variance8x8,
1253                    vpx_highbd_10_sub_pixel_variance8x8,
1254                    vpx_highbd_10_sub_pixel_avg_variance8x8,
1255                    vpx_highbd_sad8x8x3_bits10,
1256                    vpx_highbd_sad8x8x8_bits10,
1257                    vpx_highbd_sad8x8x4d_bits10)
1258 
1259         HIGHBD_BFP(BLOCK_8X4,
1260                    vpx_highbd_sad8x4_bits10,
1261                    vpx_highbd_sad8x4_avg_bits10,
1262                    vpx_highbd_10_variance8x4,
1263                    vpx_highbd_10_sub_pixel_variance8x4,
1264                    vpx_highbd_10_sub_pixel_avg_variance8x4,
1265                    NULL,
1266                    vpx_highbd_sad8x4x8_bits10,
1267                    vpx_highbd_sad8x4x4d_bits10)
1268 
1269         HIGHBD_BFP(BLOCK_4X8,
1270                    vpx_highbd_sad4x8_bits10,
1271                    vpx_highbd_sad4x8_avg_bits10,
1272                    vpx_highbd_10_variance4x8,
1273                    vpx_highbd_10_sub_pixel_variance4x8,
1274                    vpx_highbd_10_sub_pixel_avg_variance4x8,
1275                    NULL,
1276                    vpx_highbd_sad4x8x8_bits10,
1277                    vpx_highbd_sad4x8x4d_bits10)
1278 
1279         HIGHBD_BFP(BLOCK_4X4,
1280                    vpx_highbd_sad4x4_bits10,
1281                    vpx_highbd_sad4x4_avg_bits10,
1282                    vpx_highbd_10_variance4x4,
1283                    vpx_highbd_10_sub_pixel_variance4x4,
1284                    vpx_highbd_10_sub_pixel_avg_variance4x4,
1285                    vpx_highbd_sad4x4x3_bits10,
1286                    vpx_highbd_sad4x4x8_bits10,
1287                    vpx_highbd_sad4x4x4d_bits10)
1288         break;
1289 
1290       case VPX_BITS_12:
1291         HIGHBD_BFP(BLOCK_32X16,
1292                    vpx_highbd_sad32x16_bits12,
1293                    vpx_highbd_sad32x16_avg_bits12,
1294                    vpx_highbd_12_variance32x16,
1295                    vpx_highbd_12_sub_pixel_variance32x16,
1296                    vpx_highbd_12_sub_pixel_avg_variance32x16,
1297                    NULL,
1298                    NULL,
1299                    vpx_highbd_sad32x16x4d_bits12)
1300 
1301         HIGHBD_BFP(BLOCK_16X32,
1302                    vpx_highbd_sad16x32_bits12,
1303                    vpx_highbd_sad16x32_avg_bits12,
1304                    vpx_highbd_12_variance16x32,
1305                    vpx_highbd_12_sub_pixel_variance16x32,
1306                    vpx_highbd_12_sub_pixel_avg_variance16x32,
1307                    NULL,
1308                    NULL,
1309                    vpx_highbd_sad16x32x4d_bits12)
1310 
1311         HIGHBD_BFP(BLOCK_64X32,
1312                    vpx_highbd_sad64x32_bits12,
1313                    vpx_highbd_sad64x32_avg_bits12,
1314                    vpx_highbd_12_variance64x32,
1315                    vpx_highbd_12_sub_pixel_variance64x32,
1316                    vpx_highbd_12_sub_pixel_avg_variance64x32,
1317                    NULL,
1318                    NULL,
1319                    vpx_highbd_sad64x32x4d_bits12)
1320 
1321         HIGHBD_BFP(BLOCK_32X64,
1322                    vpx_highbd_sad32x64_bits12,
1323                    vpx_highbd_sad32x64_avg_bits12,
1324                    vpx_highbd_12_variance32x64,
1325                    vpx_highbd_12_sub_pixel_variance32x64,
1326                    vpx_highbd_12_sub_pixel_avg_variance32x64,
1327                    NULL,
1328                    NULL,
1329                    vpx_highbd_sad32x64x4d_bits12)
1330 
1331         HIGHBD_BFP(BLOCK_32X32,
1332                    vpx_highbd_sad32x32_bits12,
1333                    vpx_highbd_sad32x32_avg_bits12,
1334                    vpx_highbd_12_variance32x32,
1335                    vpx_highbd_12_sub_pixel_variance32x32,
1336                    vpx_highbd_12_sub_pixel_avg_variance32x32,
1337                    vpx_highbd_sad32x32x3_bits12,
1338                    vpx_highbd_sad32x32x8_bits12,
1339                    vpx_highbd_sad32x32x4d_bits12)
1340 
1341         HIGHBD_BFP(BLOCK_64X64,
1342                    vpx_highbd_sad64x64_bits12,
1343                    vpx_highbd_sad64x64_avg_bits12,
1344                    vpx_highbd_12_variance64x64,
1345                    vpx_highbd_12_sub_pixel_variance64x64,
1346                    vpx_highbd_12_sub_pixel_avg_variance64x64,
1347                    vpx_highbd_sad64x64x3_bits12,
1348                    vpx_highbd_sad64x64x8_bits12,
1349                    vpx_highbd_sad64x64x4d_bits12)
1350 
1351         HIGHBD_BFP(BLOCK_16X16,
1352                    vpx_highbd_sad16x16_bits12,
1353                    vpx_highbd_sad16x16_avg_bits12,
1354                    vpx_highbd_12_variance16x16,
1355                    vpx_highbd_12_sub_pixel_variance16x16,
1356                    vpx_highbd_12_sub_pixel_avg_variance16x16,
1357                    vpx_highbd_sad16x16x3_bits12,
1358                    vpx_highbd_sad16x16x8_bits12,
1359                    vpx_highbd_sad16x16x4d_bits12)
1360 
1361         HIGHBD_BFP(BLOCK_16X8,
1362                    vpx_highbd_sad16x8_bits12,
1363                    vpx_highbd_sad16x8_avg_bits12,
1364                    vpx_highbd_12_variance16x8,
1365                    vpx_highbd_12_sub_pixel_variance16x8,
1366                    vpx_highbd_12_sub_pixel_avg_variance16x8,
1367                    vpx_highbd_sad16x8x3_bits12,
1368                    vpx_highbd_sad16x8x8_bits12,
1369                    vpx_highbd_sad16x8x4d_bits12)
1370 
1371         HIGHBD_BFP(BLOCK_8X16,
1372                    vpx_highbd_sad8x16_bits12,
1373                    vpx_highbd_sad8x16_avg_bits12,
1374                    vpx_highbd_12_variance8x16,
1375                    vpx_highbd_12_sub_pixel_variance8x16,
1376                    vpx_highbd_12_sub_pixel_avg_variance8x16,
1377                    vpx_highbd_sad8x16x3_bits12,
1378                    vpx_highbd_sad8x16x8_bits12,
1379                    vpx_highbd_sad8x16x4d_bits12)
1380 
1381         HIGHBD_BFP(BLOCK_8X8,
1382                    vpx_highbd_sad8x8_bits12,
1383                    vpx_highbd_sad8x8_avg_bits12,
1384                    vpx_highbd_12_variance8x8,
1385                    vpx_highbd_12_sub_pixel_variance8x8,
1386                    vpx_highbd_12_sub_pixel_avg_variance8x8,
1387                    vpx_highbd_sad8x8x3_bits12,
1388                    vpx_highbd_sad8x8x8_bits12,
1389                    vpx_highbd_sad8x8x4d_bits12)
1390 
1391         HIGHBD_BFP(BLOCK_8X4,
1392                    vpx_highbd_sad8x4_bits12,
1393                    vpx_highbd_sad8x4_avg_bits12,
1394                    vpx_highbd_12_variance8x4,
1395                    vpx_highbd_12_sub_pixel_variance8x4,
1396                    vpx_highbd_12_sub_pixel_avg_variance8x4,
1397                    NULL,
1398                    vpx_highbd_sad8x4x8_bits12,
1399                    vpx_highbd_sad8x4x4d_bits12)
1400 
1401         HIGHBD_BFP(BLOCK_4X8,
1402                    vpx_highbd_sad4x8_bits12,
1403                    vpx_highbd_sad4x8_avg_bits12,
1404                    vpx_highbd_12_variance4x8,
1405                    vpx_highbd_12_sub_pixel_variance4x8,
1406                    vpx_highbd_12_sub_pixel_avg_variance4x8,
1407                    NULL,
1408                    vpx_highbd_sad4x8x8_bits12,
1409                    vpx_highbd_sad4x8x4d_bits12)
1410 
1411         HIGHBD_BFP(BLOCK_4X4,
1412                    vpx_highbd_sad4x4_bits12,
1413                    vpx_highbd_sad4x4_avg_bits12,
1414                    vpx_highbd_12_variance4x4,
1415                    vpx_highbd_12_sub_pixel_variance4x4,
1416                    vpx_highbd_12_sub_pixel_avg_variance4x4,
1417                    vpx_highbd_sad4x4x3_bits12,
1418                    vpx_highbd_sad4x4x8_bits12,
1419                    vpx_highbd_sad4x4x4d_bits12)
1420         break;
1421 
1422       default:
1423         assert(0 && "cm->bit_depth should be VPX_BITS_8, "
1424                     "VPX_BITS_10 or VPX_BITS_12");
1425     }
1426   }
1427 }
1428 #endif  // CONFIG_VP9_HIGHBITDEPTH
1429 
1430 static void realloc_segmentation_maps(VP9_COMP *cpi) {
1431   VP9_COMMON *const cm = &cpi->common;
1432 
1433   // Create the encoder segmentation map and set all entries to 0
1434   vpx_free(cpi->segmentation_map);
1435   CHECK_MEM_ERROR(cm, cpi->segmentation_map,
1436                   vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
1437 
1438   // Create a map used for cyclic background refresh.
1439   if (cpi->cyclic_refresh)
1440     vp9_cyclic_refresh_free(cpi->cyclic_refresh);
1441   CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
1442                   vp9_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
1443 
1444   // Create a map used to mark inactive areas.
1445   vpx_free(cpi->active_map.map);
1446   CHECK_MEM_ERROR(cm, cpi->active_map.map,
1447                   vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
1448 
1449   // Create a placeholder copy of the last frame's segmentation map in the
1450   // coding context, for use if we want to save and restore it.
1451   vpx_free(cpi->coding_context.last_frame_seg_map_copy);
1452   CHECK_MEM_ERROR(cm, cpi->coding_context.last_frame_seg_map_copy,
1453                   vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
1454 }
1455 
1456 void vp9_change_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) {
1457   VP9_COMMON *const cm = &cpi->common;
1458   RATE_CONTROL *const rc = &cpi->rc;
1459   int last_w = cpi->oxcf.width;
1460   int last_h = cpi->oxcf.height;
1461 
1462   if (cm->profile != oxcf->profile)
1463     cm->profile = oxcf->profile;
1464   cm->bit_depth = oxcf->bit_depth;
1465   cm->color_space = oxcf->color_space;
1466   cm->color_range = oxcf->color_range;
1467 
1468   if (cm->profile <= PROFILE_1)
1469     assert(cm->bit_depth == VPX_BITS_8);
1470   else
1471     assert(cm->bit_depth > VPX_BITS_8);
1472 
1473   cpi->oxcf = *oxcf;
1474 #if CONFIG_VP9_HIGHBITDEPTH
1475   cpi->td.mb.e_mbd.bd = (int)cm->bit_depth;
1476 #endif  // CONFIG_VP9_HIGHBITDEPTH
1477 
1478   rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
1479 
1480   cpi->refresh_golden_frame = 0;
1481   cpi->refresh_last_frame = 1;
1482   cm->refresh_frame_context = 1;
1483   cm->reset_frame_context = 0;
1484 
1485   vp9_reset_segment_features(&cm->seg);
1486   vp9_set_high_precision_mv(cpi, 0);
1487 
1488   {
1489     int i;
1490 
1491     for (i = 0; i < MAX_SEGMENTS; i++)
1492       cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
1493   }
1494   cpi->encode_breakout = cpi->oxcf.encode_breakout;
1495 
1496   set_rc_buffer_sizes(rc, &cpi->oxcf);
1497 
1498   // Under a configuration change, where maximum_buffer_size may change,
1499   // keep buffer level clipped to the maximum allowed buffer size.
1500   rc->bits_off_target = VPXMIN(rc->bits_off_target, rc->maximum_buffer_size);
1501   rc->buffer_level = VPXMIN(rc->buffer_level, rc->maximum_buffer_size);
1502 
1503   // Set up the frame rate and related rate control parameters.
1504   vp9_new_framerate(cpi, cpi->framerate);
1505 
1506   // Set absolute upper and lower quality limits
1507   rc->worst_quality = cpi->oxcf.worst_allowed_q;
1508   rc->best_quality = cpi->oxcf.best_allowed_q;
1509 
1510   cm->interp_filter = cpi->sf.default_interp_filter;
1511 
1512   if (cpi->oxcf.render_width > 0 && cpi->oxcf.render_height > 0) {
1513     cm->render_width = cpi->oxcf.render_width;
1514     cm->render_height = cpi->oxcf.render_height;
1515   } else {
1516     cm->render_width = cpi->oxcf.width;
1517     cm->render_height = cpi->oxcf.height;
1518   }
1519   if (last_w != cpi->oxcf.width || last_h != cpi->oxcf.height) {
1520     cm->width = cpi->oxcf.width;
1521     cm->height = cpi->oxcf.height;
1522   }
1523 
1524   if (cpi->initial_width) {
1525     int new_mi_size = 0;
1526     vp9_set_mb_mi(cm, cm->width, cm->height);
1527     new_mi_size = cm->mi_stride * calc_mi_size(cm->mi_rows);
1528     if (cm->mi_alloc_size < new_mi_size) {
1529       vp9_free_context_buffers(cm);
1530       alloc_compressor_data(cpi);
1531       realloc_segmentation_maps(cpi);
1532       cpi->initial_width = cpi->initial_height = 0;
1533     }
1534   }
1535   update_frame_size(cpi);
1536 
1537   if ((cpi->svc.number_temporal_layers > 1 &&
1538       cpi->oxcf.rc_mode == VPX_CBR) ||
1539       ((cpi->svc.number_temporal_layers > 1 ||
1540         cpi->svc.number_spatial_layers > 1) &&
1541        cpi->oxcf.pass != 1)) {
1542     vp9_update_layer_context_change_config(cpi,
1543                                            (int)cpi->oxcf.target_bandwidth);
1544   }
1545 
1546   cpi->alt_ref_source = NULL;
1547   rc->is_src_frame_alt_ref = 0;
1548 
1549 #if 0
1550   // Experimental RD Code
1551   cpi->frame_distortion = 0;
1552   cpi->last_frame_distortion = 0;
1553 #endif
1554 
1555   set_tile_limits(cpi);
1556 
1557   cpi->ext_refresh_frame_flags_pending = 0;
1558   cpi->ext_refresh_frame_context_pending = 0;
1559 
1560 #if CONFIG_VP9_HIGHBITDEPTH
1561   highbd_set_var_fns(cpi);
1562 #endif
1563 }
1564 
1565 #ifndef M_LOG2_E
1566 #define M_LOG2_E 0.693147180559945309417
1567 #endif
1568 #define log2f(x) (log (x) / (float) M_LOG2_E)
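/* Note: despite its name, M_LOG2_E above is defined with the value of ln(2)
 * (~0.6931), so log(x) / M_LOG2_E evaluates log2(x), which is what this
 * macro is intended to provide. */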
1569 
1570 static void cal_nmvjointsadcost(int *mvjointsadcost) {
1571   mvjointsadcost[0] = 600;
1572   mvjointsadcost[1] = 300;
1573   mvjointsadcost[2] = 300;
1574   mvjointsadcost[3] = 300;
1575 }
1576 
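/* The tables filled below hold SAD-based motion vector component costs.
 * Apart from entry 0 (cost 0), each entry is 256 * 2 * (log2(8 * |i|) + 0.6),
 * so the cost grows roughly logarithmically with the component magnitude and
 * is symmetric around zero. */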
1577 static void cal_nmvsadcosts(int *mvsadcost[2]) {
1578   int i = 1;
1579 
1580   mvsadcost[0][0] = 0;
1581   mvsadcost[1][0] = 0;
1582 
1583   do {
1584     double z = 256 * (2 * (log2f(8 * i) + .6));
1585     mvsadcost[0][i] = (int)z;
1586     mvsadcost[1][i] = (int)z;
1587     mvsadcost[0][-i] = (int)z;
1588     mvsadcost[1][-i] = (int)z;
1589   } while (++i <= MV_MAX);
1590 }
1591 
1592 static void cal_nmvsadcosts_hp(int *mvsadcost[2]) {
1593   int i = 1;
1594 
1595   mvsadcost[0][0] = 0;
1596   mvsadcost[1][0] = 0;
1597 
1598   do {
1599     double z = 256 * (2 * (log2f(8 * i) + .6));
1600     mvsadcost[0][i] = (int)z;
1601     mvsadcost[1][i] = (int)z;
1602     mvsadcost[0][-i] = (int)z;
1603     mvsadcost[1][-i] = (int)z;
1604   } while (++i <= MV_MAX);
1605 }
1606 
1607 
1608 VP9_COMP *vp9_create_compressor(VP9EncoderConfig *oxcf,
1609                                 BufferPool *const pool) {
1610   unsigned int i;
1611   VP9_COMP *volatile const cpi = vpx_memalign(32, sizeof(VP9_COMP));
1612   VP9_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
1613 
1614   if (!cm)
1615     return NULL;
1616 
1617   vp9_zero(*cpi);
1618 
1619   if (setjmp(cm->error.jmp)) {
1620     cm->error.setjmp = 0;
1621     vp9_remove_compressor(cpi);
1622     return 0;
1623   }
1624 
1625   cm->error.setjmp = 1;
1626   cm->alloc_mi = vp9_enc_alloc_mi;
1627   cm->free_mi = vp9_enc_free_mi;
1628   cm->setup_mi = vp9_enc_setup_mi;
1629 
1630   CHECK_MEM_ERROR(cm, cm->fc,
1631                   (FRAME_CONTEXT *)vpx_calloc(1, sizeof(*cm->fc)));
1632   CHECK_MEM_ERROR(cm, cm->frame_contexts,
1633                   (FRAME_CONTEXT *)vpx_calloc(FRAME_CONTEXTS,
1634                   sizeof(*cm->frame_contexts)));
1635 
1636   cpi->use_svc = 0;
1637   cpi->resize_state = 0;
1638   cpi->resize_avg_qp = 0;
1639   cpi->resize_buffer_underflow = 0;
1640   cpi->common.buffer_pool = pool;
1641 
1642   cpi->rc.high_source_sad = 0;
1643 
1644   init_config(cpi, oxcf);
1645   vp9_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
1646 
1647   cm->current_video_frame = 0;
1648   cpi->partition_search_skippable_frame = 0;
1649   cpi->tile_data = NULL;
1650 
1651   realloc_segmentation_maps(cpi);
1652 
1653   CHECK_MEM_ERROR(cm, cpi->nmvcosts[0],
1654                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[0])));
1655   CHECK_MEM_ERROR(cm, cpi->nmvcosts[1],
1656                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[1])));
1657   CHECK_MEM_ERROR(cm, cpi->nmvcosts_hp[0],
1658                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts_hp[0])));
1659   CHECK_MEM_ERROR(cm, cpi->nmvcosts_hp[1],
1660                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts_hp[1])));
1661   CHECK_MEM_ERROR(cm, cpi->nmvsadcosts[0],
1662                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts[0])));
1663   CHECK_MEM_ERROR(cm, cpi->nmvsadcosts[1],
1664                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts[1])));
1665   CHECK_MEM_ERROR(cm, cpi->nmvsadcosts_hp[0],
1666                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts_hp[0])));
1667   CHECK_MEM_ERROR(cm, cpi->nmvsadcosts_hp[1],
1668                   vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts_hp[1])));
1669 
1670   for (i = 0; i < (sizeof(cpi->mbgraph_stats) /
1671                    sizeof(cpi->mbgraph_stats[0])); i++) {
1672     CHECK_MEM_ERROR(cm, cpi->mbgraph_stats[i].mb_stats,
1673                     vpx_calloc(cm->MBs *
1674                                sizeof(*cpi->mbgraph_stats[i].mb_stats), 1));
1675   }
1676 
1677 #if CONFIG_FP_MB_STATS
1678   cpi->use_fp_mb_stats = 0;
1679   if (cpi->use_fp_mb_stats) {
1680     // Placeholder buffer used to store the per-MB stats gathered in the first pass.
1681     CHECK_MEM_ERROR(cm, cpi->twopass.frame_mb_stats_buf,
1682                     vpx_calloc(cm->MBs * sizeof(uint8_t), 1));
1683   } else {
1684     cpi->twopass.frame_mb_stats_buf = NULL;
1685   }
1686 #endif
1687 
1688   cpi->refresh_alt_ref_frame = 0;
1689   cpi->multi_arf_last_grp_enabled = 0;
1690 
1691   cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
1692 #if CONFIG_INTERNAL_STATS
1693   cpi->b_calculate_ssimg = 0;
1694   cpi->b_calculate_blockiness = 1;
1695   cpi->b_calculate_consistency = 1;
1696   cpi->total_inconsistency = 0;
1697   cpi->psnr.worst = 100.0;
1698   cpi->worst_ssim = 100.0;
1699 
1700   cpi->count = 0;
1701   cpi->bytes = 0;
1702 
1703   if (cpi->b_calculate_psnr) {
1704     cpi->total_sq_error = 0;
1705     cpi->total_samples = 0;
1706 
1707     cpi->totalp_sq_error = 0;
1708     cpi->totalp_samples = 0;
1709 
1710     cpi->tot_recode_hits = 0;
1711     cpi->summed_quality = 0;
1712     cpi->summed_weights = 0;
1713     cpi->summedp_quality = 0;
1714     cpi->summedp_weights = 0;
1715   }
1716 
1717   if (cpi->b_calculate_ssimg) {
1718     cpi->ssimg.worst = 100.0;
1719   }
1720   cpi->fastssim.worst = 100.0;
1721 
1722   cpi->psnrhvs.worst = 100.0;
1723 
1724   if (cpi->b_calculate_blockiness) {
1725     cpi->total_blockiness = 0;
1726     cpi->worst_blockiness = 0.0;
1727   }
1728 
1729   if (cpi->b_calculate_consistency) {
1730     cpi->ssim_vars = vpx_malloc(sizeof(*cpi->ssim_vars) *
1731                                 4 * cpi->common.mi_rows * cpi->common.mi_cols);
1732     cpi->worst_consistency = 100.0;
1733   }
1734 
1735 #endif
1736 
1737   cpi->first_time_stamp_ever = INT64_MAX;
1738 
1739   cal_nmvjointsadcost(cpi->td.mb.nmvjointsadcost);
1740   cpi->td.mb.nmvcost[0] = &cpi->nmvcosts[0][MV_MAX];
1741   cpi->td.mb.nmvcost[1] = &cpi->nmvcosts[1][MV_MAX];
1742   cpi->td.mb.nmvsadcost[0] = &cpi->nmvsadcosts[0][MV_MAX];
1743   cpi->td.mb.nmvsadcost[1] = &cpi->nmvsadcosts[1][MV_MAX];
1744   cal_nmvsadcosts(cpi->td.mb.nmvsadcost);
1745 
1746   cpi->td.mb.nmvcost_hp[0] = &cpi->nmvcosts_hp[0][MV_MAX];
1747   cpi->td.mb.nmvcost_hp[1] = &cpi->nmvcosts_hp[1][MV_MAX];
1748   cpi->td.mb.nmvsadcost_hp[0] = &cpi->nmvsadcosts_hp[0][MV_MAX];
1749   cpi->td.mb.nmvsadcost_hp[1] = &cpi->nmvsadcosts_hp[1][MV_MAX];
1750   cal_nmvsadcosts_hp(cpi->td.mb.nmvsadcost_hp);
1751 
1752 #if CONFIG_VP9_TEMPORAL_DENOISING
1753 #ifdef OUTPUT_YUV_DENOISED
1754   yuv_denoised_file = fopen("denoised.yuv", "ab");
1755 #endif
1756 #endif
1757 #ifdef OUTPUT_YUV_SKINMAP
1758   yuv_skinmap_file = fopen("skinmap.yuv", "ab");
1759 #endif
1760 #ifdef OUTPUT_YUV_REC
1761   yuv_rec_file = fopen("rec.yuv", "wb");
1762 #endif
1763 
1764 #if 0
1765   framepsnr = fopen("framepsnr.stt", "a");
1766   kf_list = fopen("kf_list.stt", "w");
1767 #endif
1768 
1769   cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
1770 
1771   if (oxcf->pass == 1) {
1772     vp9_init_first_pass(cpi);
1773   } else if (oxcf->pass == 2) {
1774     const size_t packet_sz = sizeof(FIRSTPASS_STATS);
1775     const int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
1776 
1777     if (cpi->svc.number_spatial_layers > 1
1778         || cpi->svc.number_temporal_layers > 1) {
1779       FIRSTPASS_STATS *const stats = oxcf->two_pass_stats_in.buf;
1780       FIRSTPASS_STATS *stats_copy[VPX_SS_MAX_LAYERS] = {0};
1781       int i;
1782 
1783       for (i = 0; i < oxcf->ss_number_layers; ++i) {
1784         FIRSTPASS_STATS *const last_packet_for_layer =
1785             &stats[packets - oxcf->ss_number_layers + i];
1786         const int layer_id = (int)last_packet_for_layer->spatial_layer_id;
1787         const int packets_in_layer = (int)last_packet_for_layer->count + 1;
1788         if (layer_id >= 0 && layer_id < oxcf->ss_number_layers) {
1789           LAYER_CONTEXT *const lc = &cpi->svc.layer_context[layer_id];
1790 
1791           vpx_free(lc->rc_twopass_stats_in.buf);
1792 
1793           lc->rc_twopass_stats_in.sz = packets_in_layer * packet_sz;
1794           CHECK_MEM_ERROR(cm, lc->rc_twopass_stats_in.buf,
1795                           vpx_malloc(lc->rc_twopass_stats_in.sz));
1796           lc->twopass.stats_in_start = lc->rc_twopass_stats_in.buf;
1797           lc->twopass.stats_in = lc->twopass.stats_in_start;
1798           lc->twopass.stats_in_end = lc->twopass.stats_in_start
1799                                      + packets_in_layer - 1;
1800           stats_copy[layer_id] = lc->rc_twopass_stats_in.buf;
1801         }
1802       }
1803 
1804       for (i = 0; i < packets; ++i) {
1805         const int layer_id = (int)stats[i].spatial_layer_id;
1806         if (layer_id >= 0 && layer_id < oxcf->ss_number_layers
1807             && stats_copy[layer_id] != NULL) {
1808           *stats_copy[layer_id] = stats[i];
1809           ++stats_copy[layer_id];
1810         }
1811       }
1812 
1813       vp9_init_second_pass_spatial_svc(cpi);
1814     } else {
1815 #if CONFIG_FP_MB_STATS
1816       if (cpi->use_fp_mb_stats) {
1817         const size_t psz = cpi->common.MBs * sizeof(uint8_t);
1818         const int ps = (int)(oxcf->firstpass_mb_stats_in.sz / psz);
1819 
1820         cpi->twopass.firstpass_mb_stats.mb_stats_start =
1821             oxcf->firstpass_mb_stats_in.buf;
1822         cpi->twopass.firstpass_mb_stats.mb_stats_end =
1823             cpi->twopass.firstpass_mb_stats.mb_stats_start +
1824             (ps - 1) * cpi->common.MBs * sizeof(uint8_t);
1825       }
1826 #endif
1827 
1828       cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
1829       cpi->twopass.stats_in = cpi->twopass.stats_in_start;
1830       cpi->twopass.stats_in_end = &cpi->twopass.stats_in[packets - 1];
1831 
1832       vp9_init_second_pass(cpi);
1833     }
1834   }
1835 
1836   vp9_set_speed_features_framesize_independent(cpi);
1837   vp9_set_speed_features_framesize_dependent(cpi);
1838 
1839   // Allocate memory to store variances for a frame.
1840   CHECK_MEM_ERROR(cm, cpi->source_diff_var,
1841                   vpx_calloc(cm->MBs, sizeof(diff)));
1842   cpi->source_var_thresh = 0;
1843   cpi->frames_till_next_var_check = 0;
1844 
1845 #define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX3F, SDX8F, SDX4DF)\
1846     cpi->fn_ptr[BT].sdf            = SDF; \
1847     cpi->fn_ptr[BT].sdaf           = SDAF; \
1848     cpi->fn_ptr[BT].vf             = VF; \
1849     cpi->fn_ptr[BT].svf            = SVF; \
1850     cpi->fn_ptr[BT].svaf           = SVAF; \
1851     cpi->fn_ptr[BT].sdx3f          = SDX3F; \
1852     cpi->fn_ptr[BT].sdx8f          = SDX8F; \
1853     cpi->fn_ptr[BT].sdx4df         = SDX4DF;
1854 
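/* For reference, each BFP() invocation below fills one row of the fn_ptr
 * table; e.g. the BLOCK_32X16 entry expands (roughly) to:
 *   cpi->fn_ptr[BLOCK_32X16].sdf    = vpx_sad32x16;
 *   cpi->fn_ptr[BLOCK_32X16].sdaf   = vpx_sad32x16_avg;
 *   cpi->fn_ptr[BLOCK_32X16].vf     = vpx_variance32x16;
 *   cpi->fn_ptr[BLOCK_32X16].svf    = vpx_sub_pixel_variance32x16;
 *   cpi->fn_ptr[BLOCK_32X16].svaf   = vpx_sub_pixel_avg_variance32x16;
 *   cpi->fn_ptr[BLOCK_32X16].sdx3f  = NULL;
 *   cpi->fn_ptr[BLOCK_32X16].sdx8f  = NULL;
 *   cpi->fn_ptr[BLOCK_32X16].sdx4df = vpx_sad32x16x4d;
 */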
1855   BFP(BLOCK_32X16, vpx_sad32x16, vpx_sad32x16_avg,
1856       vpx_variance32x16, vpx_sub_pixel_variance32x16,
1857       vpx_sub_pixel_avg_variance32x16, NULL, NULL, vpx_sad32x16x4d)
1858 
1859   BFP(BLOCK_16X32, vpx_sad16x32, vpx_sad16x32_avg,
1860       vpx_variance16x32, vpx_sub_pixel_variance16x32,
1861       vpx_sub_pixel_avg_variance16x32, NULL, NULL, vpx_sad16x32x4d)
1862 
1863   BFP(BLOCK_64X32, vpx_sad64x32, vpx_sad64x32_avg,
1864       vpx_variance64x32, vpx_sub_pixel_variance64x32,
1865       vpx_sub_pixel_avg_variance64x32, NULL, NULL, vpx_sad64x32x4d)
1866 
1867   BFP(BLOCK_32X64, vpx_sad32x64, vpx_sad32x64_avg,
1868       vpx_variance32x64, vpx_sub_pixel_variance32x64,
1869       vpx_sub_pixel_avg_variance32x64, NULL, NULL, vpx_sad32x64x4d)
1870 
1871   BFP(BLOCK_32X32, vpx_sad32x32, vpx_sad32x32_avg,
1872       vpx_variance32x32, vpx_sub_pixel_variance32x32,
1873       vpx_sub_pixel_avg_variance32x32, vpx_sad32x32x3, vpx_sad32x32x8,
1874       vpx_sad32x32x4d)
1875 
1876   BFP(BLOCK_64X64, vpx_sad64x64, vpx_sad64x64_avg,
1877       vpx_variance64x64, vpx_sub_pixel_variance64x64,
1878       vpx_sub_pixel_avg_variance64x64, vpx_sad64x64x3, vpx_sad64x64x8,
1879       vpx_sad64x64x4d)
1880 
1881   BFP(BLOCK_16X16, vpx_sad16x16, vpx_sad16x16_avg,
1882       vpx_variance16x16, vpx_sub_pixel_variance16x16,
1883       vpx_sub_pixel_avg_variance16x16, vpx_sad16x16x3, vpx_sad16x16x8,
1884       vpx_sad16x16x4d)
1885 
1886   BFP(BLOCK_16X8, vpx_sad16x8, vpx_sad16x8_avg,
1887       vpx_variance16x8, vpx_sub_pixel_variance16x8,
1888       vpx_sub_pixel_avg_variance16x8,
1889       vpx_sad16x8x3, vpx_sad16x8x8, vpx_sad16x8x4d)
1890 
1891   BFP(BLOCK_8X16, vpx_sad8x16, vpx_sad8x16_avg,
1892       vpx_variance8x16, vpx_sub_pixel_variance8x16,
1893       vpx_sub_pixel_avg_variance8x16,
1894       vpx_sad8x16x3, vpx_sad8x16x8, vpx_sad8x16x4d)
1895 
1896   BFP(BLOCK_8X8, vpx_sad8x8, vpx_sad8x8_avg,
1897       vpx_variance8x8, vpx_sub_pixel_variance8x8,
1898       vpx_sub_pixel_avg_variance8x8,
1899       vpx_sad8x8x3, vpx_sad8x8x8, vpx_sad8x8x4d)
1900 
1901   BFP(BLOCK_8X4, vpx_sad8x4, vpx_sad8x4_avg,
1902       vpx_variance8x4, vpx_sub_pixel_variance8x4,
1903       vpx_sub_pixel_avg_variance8x4, NULL, vpx_sad8x4x8, vpx_sad8x4x4d)
1904 
1905   BFP(BLOCK_4X8, vpx_sad4x8, vpx_sad4x8_avg,
1906       vpx_variance4x8, vpx_sub_pixel_variance4x8,
1907       vpx_sub_pixel_avg_variance4x8, NULL, vpx_sad4x8x8, vpx_sad4x8x4d)
1908 
1909   BFP(BLOCK_4X4, vpx_sad4x4, vpx_sad4x4_avg,
1910       vpx_variance4x4, vpx_sub_pixel_variance4x4,
1911       vpx_sub_pixel_avg_variance4x4,
1912       vpx_sad4x4x3, vpx_sad4x4x8, vpx_sad4x4x4d)
1913 
1914 #if CONFIG_VP9_HIGHBITDEPTH
1915   highbd_set_var_fns(cpi);
1916 #endif
1917 
1918   /* vp9_init_quantizer() is first called here. Add check in
1919    * vp9_frame_init_quantizer() so that vp9_init_quantizer is only
1920    * called later when needed. This will avoid unnecessary calls of
1921    * vp9_init_quantizer() for every frame.
1922    */
1923   vp9_init_quantizer(cpi);
1924 
1925   vp9_loop_filter_init(cm);
1926 
1927   cm->error.setjmp = 0;
1928 
1929   return cpi;
1930 }
1931 #define SNPRINT(H, T) \
1932   snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T))
1933 
1934 #define SNPRINT2(H, T, V) \
1935   snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T), (V))
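/* These helpers append formatted text to a fixed-size char array, e.g.
 *   SNPRINT2(results, "\t%7.3f", consistency);
 * Because the bound is sizeof(H), H must be a true array (as headings and
 * results below are), not a pointer. */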
1936 
1937 void vp9_remove_compressor(VP9_COMP *cpi) {
1938   VP9_COMMON *cm;
1939   unsigned int i;
1940   int t;
1941 
1942   if (!cpi)
1943     return;
1944 
1945   cm = &cpi->common;
1946   if (cm->current_video_frame > 0) {
1947 #if CONFIG_INTERNAL_STATS
1948     vpx_clear_system_state();
1949 
1950     if (cpi->oxcf.pass != 1) {
1951       char headings[512] = {0};
1952       char results[512] = {0};
1953       FILE *f = fopen("opsnr.stt", "a");
1954       double time_encoded = (cpi->last_end_time_stamp_seen
1955                              - cpi->first_time_stamp_ever) / 10000000.000;
1956       double total_encode_time = (cpi->time_receive_data +
1957                                   cpi->time_compress_data)   / 1000.000;
1958       const double dr =
1959           (double)cpi->bytes * (double) 8 / (double)1000 / time_encoded;
1960       const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
1961 
1962       if (cpi->b_calculate_psnr) {
1963         const double total_psnr =
1964             vpx_sse_to_psnr((double)cpi->total_samples, peak,
1965                             (double)cpi->total_sq_error);
1966         const double totalp_psnr =
1967             vpx_sse_to_psnr((double)cpi->totalp_samples, peak,
1968                             (double)cpi->totalp_sq_error);
1969         const double total_ssim = 100 * pow(cpi->summed_quality /
1970                                             cpi->summed_weights, 8.0);
1971         const double totalp_ssim = 100 * pow(cpi->summedp_quality /
1972                                              cpi->summedp_weights, 8.0);
1973 
1974         snprintf(headings, sizeof(headings),
1975                  "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
1976                  "VPXSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
1977                  "WstPsnr\tWstSsim\tWstFast\tWstHVS");
1978         snprintf(results, sizeof(results),
1979                  "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
1980                  "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
1981                  "%7.3f\t%7.3f\t%7.3f\t%7.3f",
1982                  dr, cpi->psnr.stat[ALL] / cpi->count, total_psnr,
1983                  cpi->psnrp.stat[ALL] / cpi->count, totalp_psnr,
1984                  total_ssim, totalp_ssim,
1985                  cpi->fastssim.stat[ALL] / cpi->count,
1986                  cpi->psnrhvs.stat[ALL] / cpi->count,
1987                  cpi->psnr.worst, cpi->worst_ssim, cpi->fastssim.worst,
1988                  cpi->psnrhvs.worst);
1989 
1990         if (cpi->b_calculate_blockiness) {
1991           SNPRINT(headings, "\t  Block\tWstBlck");
1992           SNPRINT2(results, "\t%7.3f", cpi->total_blockiness / cpi->count);
1993           SNPRINT2(results, "\t%7.3f", cpi->worst_blockiness);
1994         }
1995 
1996         if (cpi->b_calculate_consistency) {
1997           double consistency =
1998               vpx_sse_to_psnr((double)cpi->totalp_samples, peak,
1999                               (double)cpi->total_inconsistency);
2000 
2001           SNPRINT(headings, "\tConsist\tWstCons");
2002           SNPRINT2(results, "\t%7.3f", consistency);
2003           SNPRINT2(results, "\t%7.3f", cpi->worst_consistency);
2004         }
2005 
2006         if (cpi->b_calculate_ssimg) {
2007           SNPRINT(headings, "\t  SSIMG\tWtSSIMG");
2008           SNPRINT2(results, "\t%7.3f", cpi->ssimg.stat[ALL] / cpi->count);
2009           SNPRINT2(results, "\t%7.3f", cpi->ssimg.worst);
2010         }
2011 
2012         fprintf(f, "%s\t    Time\n", headings);
2013         fprintf(f, "%s\t%8.0f\n", results, total_encode_time);
2014       }
2015 
2016       fclose(f);
2017     }
2018 
2019 #endif
2020 
2021 #if 0
2022     {
2023       printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2024       printf("\n_frames recive_data encod_mb_row compress_frame  Total\n");
2025       printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame,
2026              cpi->time_receive_data / 1000, cpi->time_encode_sb_row / 1000,
2027              cpi->time_compress_data / 1000,
2028              (cpi->time_receive_data + cpi->time_compress_data) / 1000);
2029     }
2030 #endif
2031   }
2032 
2033 #if CONFIG_VP9_TEMPORAL_DENOISING
2034   vp9_denoiser_free(&(cpi->denoiser));
2035 #endif
2036 
2037   for (t = 0; t < cpi->num_workers; ++t) {
2038     VPxWorker *const worker = &cpi->workers[t];
2039     EncWorkerData *const thread_data = &cpi->tile_thr_data[t];
2040 
2041     // Deallocate allocated threads.
2042     vpx_get_worker_interface()->end(worker);
2043 
2044     // Deallocate allocated thread data.
2045     if (t < cpi->num_workers - 1) {
2046       vpx_free(thread_data->td->counts);
2047       vp9_free_pc_tree(thread_data->td);
2048       vpx_free(thread_data->td);
2049     }
2050   }
2051   vpx_free(cpi->tile_thr_data);
2052   vpx_free(cpi->workers);
2053 
2054   if (cpi->num_workers > 1)
2055     vp9_loop_filter_dealloc(&cpi->lf_row_sync);
2056 
2057   dealloc_compressor_data(cpi);
2058 
2059   for (i = 0; i < sizeof(cpi->mbgraph_stats) /
2060                   sizeof(cpi->mbgraph_stats[0]); ++i) {
2061     vpx_free(cpi->mbgraph_stats[i].mb_stats);
2062   }
2063 
2064 #if CONFIG_FP_MB_STATS
2065   if (cpi->use_fp_mb_stats) {
2066     vpx_free(cpi->twopass.frame_mb_stats_buf);
2067     cpi->twopass.frame_mb_stats_buf = NULL;
2068   }
2069 #endif
2070 
2071   vp9_remove_common(cm);
2072   vp9_free_ref_frame_buffers(cm->buffer_pool);
2073 #if CONFIG_VP9_POSTPROC
2074   vp9_free_postproc_buffers(cm);
2075 #endif
2076   vpx_free(cpi);
2077 
2078 #if CONFIG_VP9_TEMPORAL_DENOISING
2079 #ifdef OUTPUT_YUV_DENOISED
2080   fclose(yuv_denoised_file);
2081 #endif
2082 #endif
2083 #ifdef OUTPUT_YUV_SKINMAP
2084   fclose(yuv_skinmap_file);
2085 #endif
2086 #ifdef OUTPUT_YUV_REC
2087   fclose(yuv_rec_file);
2088 #endif
2089 
2090 #if 0
2091 
2092   if (keyfile)
2093     fclose(keyfile);
2094 
2095   if (framepsnr)
2096     fclose(framepsnr);
2097 
2098   if (kf_list)
2099     fclose(kf_list);
2100 
2101 #endif
2102 }
2103 
2104 /* TODO(yaowu): The block_variance calls the unoptimized versions of variance()
2105  * and highbd_8_variance(). It should not.
2106  */
2107 static void encoder_variance(const uint8_t *a, int  a_stride,
2108                              const uint8_t *b, int  b_stride,
2109                              int  w, int  h, unsigned int *sse, int *sum) {
2110   int i, j;
2111 
2112   *sum = 0;
2113   *sse = 0;
2114 
2115   for (i = 0; i < h; i++) {
2116     for (j = 0; j < w; j++) {
2117       const int diff = a[j] - b[j];
2118       *sum += diff;
2119       *sse += diff * diff;
2120     }
2121 
2122     a += a_stride;
2123     b += b_stride;
2124   }
2125 }
2126 
2127 #if CONFIG_VP9_HIGHBITDEPTH
2128 static void encoder_highbd_variance64(const uint8_t *a8, int  a_stride,
2129                                       const uint8_t *b8, int  b_stride,
2130                                       int w, int h, uint64_t *sse,
2131                                       uint64_t *sum) {
2132   int i, j;
2133 
2134   uint16_t *a = CONVERT_TO_SHORTPTR(a8);
2135   uint16_t *b = CONVERT_TO_SHORTPTR(b8);
2136   *sum = 0;
2137   *sse = 0;
2138 
2139   for (i = 0; i < h; i++) {
2140     for (j = 0; j < w; j++) {
2141       const int diff = a[j] - b[j];
2142       *sum += diff;
2143       *sse += diff * diff;
2144     }
2145     a += a_stride;
2146     b += b_stride;
2147   }
2148 }
2149 
2150 static void encoder_highbd_8_variance(const uint8_t *a8, int  a_stride,
2151                                       const uint8_t *b8, int  b_stride,
2152                                       int w, int h,
2153                                       unsigned int *sse, int *sum) {
2154   uint64_t sse_long = 0;
2155   uint64_t sum_long = 0;
2156   encoder_highbd_variance64(a8, a_stride, b8, b_stride, w, h,
2157                             &sse_long, &sum_long);
2158   *sse = (unsigned int)sse_long;
2159   *sum = (int)sum_long;
2160 }
2161 #endif  // CONFIG_VP9_HIGHBITDEPTH
2162 
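/* Sum of squared error over a whole plane: the interior is covered with
 * 16x16 blocks via vpx_mse16x16(), while any leftover right-edge columns
 * (width % 16) and bottom-edge rows (height % 16) are handled by the
 * generic encoder_variance() above. */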
2163 static int64_t get_sse(const uint8_t *a, int a_stride,
2164                        const uint8_t *b, int b_stride,
2165                        int width, int height) {
2166   const int dw = width % 16;
2167   const int dh = height % 16;
2168   int64_t total_sse = 0;
2169   unsigned int sse = 0;
2170   int sum = 0;
2171   int x, y;
2172 
2173   if (dw > 0) {
2174     encoder_variance(&a[width - dw], a_stride, &b[width - dw], b_stride,
2175                      dw, height, &sse, &sum);
2176     total_sse += sse;
2177   }
2178 
2179   if (dh > 0) {
2180     encoder_variance(&a[(height - dh) * a_stride], a_stride,
2181                      &b[(height - dh) * b_stride], b_stride,
2182                      width - dw, dh, &sse, &sum);
2183     total_sse += sse;
2184   }
2185 
2186   for (y = 0; y < height / 16; ++y) {
2187     const uint8_t *pa = a;
2188     const uint8_t *pb = b;
2189     for (x = 0; x < width / 16; ++x) {
2190       vpx_mse16x16(pa, a_stride, pb, b_stride, &sse);
2191       total_sse += sse;
2192 
2193       pa += 16;
2194       pb += 16;
2195     }
2196 
2197     a += 16 * a_stride;
2198     b += 16 * b_stride;
2199   }
2200 
2201   return total_sse;
2202 }
2203 
2204 #if CONFIG_VP9_HIGHBITDEPTH
2205 static int64_t highbd_get_sse_shift(const uint8_t *a8, int a_stride,
2206                                     const uint8_t *b8, int b_stride,
2207                                     int width, int height,
2208                                     unsigned int input_shift) {
2209   const uint16_t *a = CONVERT_TO_SHORTPTR(a8);
2210   const uint16_t *b = CONVERT_TO_SHORTPTR(b8);
2211   int64_t total_sse = 0;
2212   int x, y;
2213   for (y = 0; y < height; ++y) {
2214     for (x = 0; x < width; ++x) {
2215       int64_t diff;
2216       diff = (a[x] >> input_shift) - (b[x] >> input_shift);
2217       total_sse += diff * diff;
2218     }
2219     a += a_stride;
2220     b += b_stride;
2221   }
2222   return total_sse;
2223 }
2224 
2225 static int64_t highbd_get_sse(const uint8_t *a, int a_stride,
2226                               const uint8_t *b, int b_stride,
2227                               int width, int height) {
2228   int64_t total_sse = 0;
2229   int x, y;
2230   const int dw = width % 16;
2231   const int dh = height % 16;
2232   unsigned int sse = 0;
2233   int sum = 0;
2234   if (dw > 0) {
2235     encoder_highbd_8_variance(&a[width - dw], a_stride,
2236                               &b[width - dw], b_stride,
2237                               dw, height, &sse, &sum);
2238     total_sse += sse;
2239   }
2240   if (dh > 0) {
2241     encoder_highbd_8_variance(&a[(height - dh) * a_stride], a_stride,
2242                               &b[(height - dh) * b_stride], b_stride,
2243                               width - dw, dh, &sse, &sum);
2244     total_sse += sse;
2245   }
2246   for (y = 0; y < height / 16; ++y) {
2247     const uint8_t *pa = a;
2248     const uint8_t *pb = b;
2249     for (x = 0; x < width / 16; ++x) {
2250       vpx_highbd_8_mse16x16(pa, a_stride, pb, b_stride, &sse);
2251       total_sse += sse;
2252       pa += 16;
2253       pb += 16;
2254     }
2255     a += 16 * a_stride;
2256     b += 16 * b_stride;
2257   }
2258   return total_sse;
2259 }
2260 #endif  // CONFIG_VP9_HIGHBITDEPTH
2261 
2262 typedef struct {
2263   double psnr[4];       // total/y/u/v
2264   uint64_t sse[4];      // total/y/u/v
2265   uint32_t samples[4];  // total/y/u/v
2266 } PSNR_STATS;
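/* Index 0 holds the combined (all-plane) statistics and indices 1..3 hold
 * Y/U/V. vpx_sse_to_psnr() converts the accumulated SSE to (roughly)
 *   psnr = 10 * log10(samples * peak^2 / sse). */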
2267 
2268 #if CONFIG_VP9_HIGHBITDEPTH
2269 static void calc_highbd_psnr(const YV12_BUFFER_CONFIG *a,
2270                              const YV12_BUFFER_CONFIG *b,
2271                              PSNR_STATS *psnr,
2272                              unsigned int bit_depth,
2273                              unsigned int in_bit_depth) {
2274   const int widths[3] =
2275       {a->y_crop_width,  a->uv_crop_width,  a->uv_crop_width };
2276   const int heights[3] =
2277       {a->y_crop_height, a->uv_crop_height, a->uv_crop_height};
2278   const uint8_t *a_planes[3] = {a->y_buffer, a->u_buffer,  a->v_buffer };
2279   const int a_strides[3] = {a->y_stride, a->uv_stride, a->uv_stride};
2280   const uint8_t *b_planes[3] = {b->y_buffer, b->u_buffer,  b->v_buffer };
2281   const int b_strides[3] = {b->y_stride, b->uv_stride, b->uv_stride};
2282   int i;
2283   uint64_t total_sse = 0;
2284   uint32_t total_samples = 0;
2285   const double peak = (double)((1 << in_bit_depth) - 1);
2286   const unsigned int input_shift = bit_depth - in_bit_depth;
2287 
2288   for (i = 0; i < 3; ++i) {
2289     const int w = widths[i];
2290     const int h = heights[i];
2291     const uint32_t samples = w * h;
2292     uint64_t sse;
2293     if (a->flags & YV12_FLAG_HIGHBITDEPTH) {
2294       if (input_shift) {
2295         sse = highbd_get_sse_shift(a_planes[i], a_strides[i],
2296                                    b_planes[i], b_strides[i], w, h,
2297                                    input_shift);
2298       } else {
2299         sse = highbd_get_sse(a_planes[i], a_strides[i],
2300                              b_planes[i], b_strides[i], w, h);
2301       }
2302     } else {
2303       sse = get_sse(a_planes[i], a_strides[i],
2304                     b_planes[i], b_strides[i],
2305                     w, h);
2306     }
2307     psnr->sse[1 + i] = sse;
2308     psnr->samples[1 + i] = samples;
2309     psnr->psnr[1 + i] = vpx_sse_to_psnr(samples, peak, (double)sse);
2310 
2311     total_sse += sse;
2312     total_samples += samples;
2313   }
2314 
2315   psnr->sse[0] = total_sse;
2316   psnr->samples[0] = total_samples;
2317   psnr->psnr[0] = vpx_sse_to_psnr((double)total_samples, peak,
2318                                   (double)total_sse);
2319 }
2320 
2321 #else  // !CONFIG_VP9_HIGHBITDEPTH
2322 
2323 static void calc_psnr(const YV12_BUFFER_CONFIG *a, const YV12_BUFFER_CONFIG *b,
2324                       PSNR_STATS *psnr) {
2325   static const double peak = 255.0;
2326   const int widths[3]        = {
2327       a->y_crop_width, a->uv_crop_width, a->uv_crop_width};
2328   const int heights[3]       = {
2329       a->y_crop_height, a->uv_crop_height, a->uv_crop_height};
2330   const uint8_t *a_planes[3] = {a->y_buffer, a->u_buffer, a->v_buffer};
2331   const int a_strides[3]     = {a->y_stride, a->uv_stride, a->uv_stride};
2332   const uint8_t *b_planes[3] = {b->y_buffer, b->u_buffer, b->v_buffer};
2333   const int b_strides[3]     = {b->y_stride, b->uv_stride, b->uv_stride};
2334   int i;
2335   uint64_t total_sse = 0;
2336   uint32_t total_samples = 0;
2337 
2338   for (i = 0; i < 3; ++i) {
2339     const int w = widths[i];
2340     const int h = heights[i];
2341     const uint32_t samples = w * h;
2342     const uint64_t sse = get_sse(a_planes[i], a_strides[i],
2343                                  b_planes[i], b_strides[i],
2344                                  w, h);
2345     psnr->sse[1 + i] = sse;
2346     psnr->samples[1 + i] = samples;
2347     psnr->psnr[1 + i] = vpx_sse_to_psnr(samples, peak, (double)sse);
2348 
2349     total_sse += sse;
2350     total_samples += samples;
2351   }
2352 
2353   psnr->sse[0] = total_sse;
2354   psnr->samples[0] = total_samples;
2355   psnr->psnr[0] = vpx_sse_to_psnr((double)total_samples, peak,
2356                                   (double)total_sse);
2357 }
2358 #endif  // CONFIG_VP9_HIGHBITDEPTH
2359 
2360 static void generate_psnr_packet(VP9_COMP *cpi) {
2361   struct vpx_codec_cx_pkt pkt;
2362   int i;
2363   PSNR_STATS psnr;
2364 #if CONFIG_VP9_HIGHBITDEPTH
2365   calc_highbd_psnr(cpi->Source, cpi->common.frame_to_show, &psnr,
2366                    cpi->td.mb.e_mbd.bd, cpi->oxcf.input_bit_depth);
2367 #else
2368   calc_psnr(cpi->Source, cpi->common.frame_to_show, &psnr);
2369 #endif
2370 
2371   for (i = 0; i < 4; ++i) {
2372     pkt.data.psnr.samples[i] = psnr.samples[i];
2373     pkt.data.psnr.sse[i] = psnr.sse[i];
2374     pkt.data.psnr.psnr[i] = psnr.psnr[i];
2375   }
2376   pkt.kind = VPX_CODEC_PSNR_PKT;
2377   if (cpi->use_svc)
2378     cpi->svc.layer_context[cpi->svc.spatial_layer_id *
2379         cpi->svc.number_temporal_layers].psnr_pkt = pkt.data.psnr;
2380   else
2381     vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
2382 }
2383 
2384 int vp9_use_as_reference(VP9_COMP *cpi, int ref_frame_flags) {
2385   if (ref_frame_flags > 7)
2386     return -1;
2387 
2388   cpi->ref_frame_flags = ref_frame_flags;
2389   return 0;
2390 }
2391 
2392 void vp9_update_reference(VP9_COMP *cpi, int ref_frame_flags) {
2393   cpi->ext_refresh_golden_frame = (ref_frame_flags & VP9_GOLD_FLAG) != 0;
2394   cpi->ext_refresh_alt_ref_frame = (ref_frame_flags & VP9_ALT_FLAG) != 0;
2395   cpi->ext_refresh_last_frame = (ref_frame_flags & VP9_LAST_FLAG) != 0;
2396   cpi->ext_refresh_frame_flags_pending = 1;
2397 }
2398 
2399 static YV12_BUFFER_CONFIG *get_vp9_ref_frame_buffer(VP9_COMP *cpi,
2400                                 VP9_REFFRAME ref_frame_flag) {
2401   MV_REFERENCE_FRAME ref_frame = NONE;
2402   if (ref_frame_flag == VP9_LAST_FLAG)
2403     ref_frame = LAST_FRAME;
2404   else if (ref_frame_flag == VP9_GOLD_FLAG)
2405     ref_frame = GOLDEN_FRAME;
2406   else if (ref_frame_flag == VP9_ALT_FLAG)
2407     ref_frame = ALTREF_FRAME;
2408 
2409   return ref_frame == NONE ? NULL : get_ref_frame_buffer(cpi, ref_frame);
2410 }
2411 
2412 int vp9_copy_reference_enc(VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag,
2413                            YV12_BUFFER_CONFIG *sd) {
2414   YV12_BUFFER_CONFIG *cfg = get_vp9_ref_frame_buffer(cpi, ref_frame_flag);
2415   if (cfg) {
2416     vp8_yv12_copy_frame(cfg, sd);
2417     return 0;
2418   } else {
2419     return -1;
2420   }
2421 }
2422 
2423 int vp9_set_reference_enc(VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag,
2424                           YV12_BUFFER_CONFIG *sd) {
2425   YV12_BUFFER_CONFIG *cfg = get_vp9_ref_frame_buffer(cpi, ref_frame_flag);
2426   if (cfg) {
2427     vp8_yv12_copy_frame(sd, cfg);
2428     return 0;
2429   } else {
2430     return -1;
2431   }
2432 }
2433 
2434 int vp9_update_entropy(VP9_COMP *cpi, int update) {
2435   cpi->ext_refresh_frame_context = update;
2436   cpi->ext_refresh_frame_context_pending = 1;
2437   return 0;
2438 }
2439 
2440 #if defined(OUTPUT_YUV_DENOISED) || defined(OUTPUT_YUV_SKINMAP)
2441 // The denoiser buffer is allocated as a YUV 440 buffer. This function writes it
2442 // as YUV 420. We simply use the top-left pixels of the UV buffers, since we do
2443 // not denoise the UV channels at this time. If ever we implement UV channel
2444 // denoising we will have to modify this.
2445 void vp9_write_yuv_frame_420(YV12_BUFFER_CONFIG *s, FILE *f) {
2446   uint8_t *src = s->y_buffer;
2447   int h = s->y_height;
2448 
2449   do {
2450     fwrite(src, s->y_width, 1, f);
2451     src += s->y_stride;
2452   } while (--h);
2453 
2454   src = s->u_buffer;
2455   h = s->uv_height;
2456 
2457   do {
2458     fwrite(src, s->uv_width, 1, f);
2459     src += s->uv_stride;
2460   } while (--h);
2461 
2462   src = s->v_buffer;
2463   h = s->uv_height;
2464 
2465   do {
2466     fwrite(src, s->uv_width, 1, f);
2467     src += s->uv_stride;
2468   } while (--h);
2469 }
2470 #endif
2471 
2472 #ifdef OUTPUT_YUV_REC
2473 void vp9_write_yuv_rec_frame(VP9_COMMON *cm) {
2474   YV12_BUFFER_CONFIG *s = cm->frame_to_show;
2475   uint8_t *src = s->y_buffer;
2476   int h = cm->height;
2477 
2478 #if CONFIG_VP9_HIGHBITDEPTH
2479   if (s->flags & YV12_FLAG_HIGHBITDEPTH) {
2480     uint16_t *src16 = CONVERT_TO_SHORTPTR(s->y_buffer);
2481 
2482     do {
2483       fwrite(src16, s->y_width, 2,  yuv_rec_file);
2484       src16 += s->y_stride;
2485     } while (--h);
2486 
2487     src16 = CONVERT_TO_SHORTPTR(s->u_buffer);
2488     h = s->uv_height;
2489 
2490     do {
2491       fwrite(src16, s->uv_width, 2,  yuv_rec_file);
2492       src16 += s->uv_stride;
2493     } while (--h);
2494 
2495     src16 = CONVERT_TO_SHORTPTR(s->v_buffer);
2496     h = s->uv_height;
2497 
2498     do {
2499       fwrite(src16, s->uv_width, 2, yuv_rec_file);
2500       src16 += s->uv_stride;
2501     } while (--h);
2502 
2503     fflush(yuv_rec_file);
2504     return;
2505   }
2506 #endif  // CONFIG_VP9_HIGHBITDEPTH
2507 
2508   do {
2509     fwrite(src, s->y_width, 1,  yuv_rec_file);
2510     src += s->y_stride;
2511   } while (--h);
2512 
2513   src = s->u_buffer;
2514   h = s->uv_height;
2515 
2516   do {
2517     fwrite(src, s->uv_width, 1,  yuv_rec_file);
2518     src += s->uv_stride;
2519   } while (--h);
2520 
2521   src = s->v_buffer;
2522   h = s->uv_height;
2523 
2524   do {
2525     fwrite(src, s->uv_width, 1, yuv_rec_file);
2526     src += s->uv_stride;
2527   } while (--h);
2528 
2529   fflush(yuv_rec_file);
2530 }
2531 #endif
2532 
2533 #if CONFIG_VP9_HIGHBITDEPTH
2534 static void scale_and_extend_frame_nonnormative(const YV12_BUFFER_CONFIG *src,
2535                                                 YV12_BUFFER_CONFIG *dst,
2536                                                 int bd) {
2537 #else
2538 static void scale_and_extend_frame_nonnormative(const YV12_BUFFER_CONFIG *src,
2539                                                 YV12_BUFFER_CONFIG *dst) {
2540 #endif  // CONFIG_VP9_HIGHBITDEPTH
2541   // TODO(dkovalev): replace YV12_BUFFER_CONFIG with vpx_image_t
2542   int i;
2543   const uint8_t *const srcs[3] = {src->y_buffer, src->u_buffer, src->v_buffer};
2544   const int src_strides[3] = {src->y_stride, src->uv_stride, src->uv_stride};
2545   const int src_widths[3] = {src->y_crop_width, src->uv_crop_width,
2546                              src->uv_crop_width };
2547   const int src_heights[3] = {src->y_crop_height, src->uv_crop_height,
2548                               src->uv_crop_height};
2549   uint8_t *const dsts[3] = {dst->y_buffer, dst->u_buffer, dst->v_buffer};
2550   const int dst_strides[3] = {dst->y_stride, dst->uv_stride, dst->uv_stride};
2551   const int dst_widths[3] = {dst->y_crop_width, dst->uv_crop_width,
2552                              dst->uv_crop_width};
2553   const int dst_heights[3] = {dst->y_crop_height, dst->uv_crop_height,
2554                               dst->uv_crop_height};
2555 
2556   for (i = 0; i < MAX_MB_PLANE; ++i) {
2557 #if CONFIG_VP9_HIGHBITDEPTH
2558     if (src->flags & YV12_FLAG_HIGHBITDEPTH) {
2559       vp9_highbd_resize_plane(srcs[i], src_heights[i], src_widths[i],
2560                               src_strides[i], dsts[i], dst_heights[i],
2561                               dst_widths[i], dst_strides[i], bd);
2562     } else {
2563       vp9_resize_plane(srcs[i], src_heights[i], src_widths[i], src_strides[i],
2564                        dsts[i], dst_heights[i], dst_widths[i], dst_strides[i]);
2565     }
2566 #else
2567     vp9_resize_plane(srcs[i], src_heights[i], src_widths[i], src_strides[i],
2568                      dsts[i], dst_heights[i], dst_widths[i], dst_strides[i]);
2569 #endif  // CONFIG_VP9_HIGHBITDEPTH
2570   }
2571   vpx_extend_frame_borders(dst);
2572 }
2573 
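/* Filter-based scaler: for every 16x16 destination tile, the corresponding
 * source position is computed in 1/16-pel units (x_q4, y_q4) and the 8-tap
 * EIGHTTAP kernel phase (x_q4 & 0xf, y_q4 & 0xf) is applied with a step of
 * 16 * src_dim / dst_dim; chroma planes use half-size tiles via `factor`. */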
2574 #if CONFIG_VP9_HIGHBITDEPTH
2575 static void scale_and_extend_frame(const YV12_BUFFER_CONFIG *src,
2576                                    YV12_BUFFER_CONFIG *dst, int bd) {
2577 #else
2578 static void scale_and_extend_frame(const YV12_BUFFER_CONFIG *src,
2579                                    YV12_BUFFER_CONFIG *dst) {
2580 #endif  // CONFIG_VP9_HIGHBITDEPTH
2581   const int src_w = src->y_crop_width;
2582   const int src_h = src->y_crop_height;
2583   const int dst_w = dst->y_crop_width;
2584   const int dst_h = dst->y_crop_height;
2585   const uint8_t *const srcs[3] = {src->y_buffer, src->u_buffer, src->v_buffer};
2586   const int src_strides[3] = {src->y_stride, src->uv_stride, src->uv_stride};
2587   uint8_t *const dsts[3] = {dst->y_buffer, dst->u_buffer, dst->v_buffer};
2588   const int dst_strides[3] = {dst->y_stride, dst->uv_stride, dst->uv_stride};
2589   const InterpKernel *const kernel = vp9_filter_kernels[EIGHTTAP];
2590   int x, y, i;
2591 
2592   for (y = 0; y < dst_h; y += 16) {
2593     for (x = 0; x < dst_w; x += 16) {
2594       for (i = 0; i < MAX_MB_PLANE; ++i) {
2595         const int factor = (i == 0 || i == 3 ? 1 : 2);
2596         const int x_q4 = x * (16 / factor) * src_w / dst_w;
2597         const int y_q4 = y * (16 / factor) * src_h / dst_h;
2598         const int src_stride = src_strides[i];
2599         const int dst_stride = dst_strides[i];
2600         const uint8_t *src_ptr = srcs[i] + (y / factor) * src_h / dst_h *
2601                                      src_stride + (x / factor) * src_w / dst_w;
2602         uint8_t *dst_ptr = dsts[i] + (y / factor) * dst_stride + (x / factor);
2603 
2604 #if CONFIG_VP9_HIGHBITDEPTH
2605         if (src->flags & YV12_FLAG_HIGHBITDEPTH) {
2606           vpx_highbd_convolve8(src_ptr, src_stride, dst_ptr, dst_stride,
2607                                kernel[x_q4 & 0xf], 16 * src_w / dst_w,
2608                                kernel[y_q4 & 0xf], 16 * src_h / dst_h,
2609                                16 / factor, 16 / factor, bd);
2610         } else {
2611           vpx_scaled_2d(src_ptr, src_stride, dst_ptr, dst_stride,
2612                         kernel[x_q4 & 0xf], 16 * src_w / dst_w,
2613                         kernel[y_q4 & 0xf], 16 * src_h / dst_h,
2614                         16 / factor, 16 / factor);
2615         }
2616 #else
2617         vpx_scaled_2d(src_ptr, src_stride, dst_ptr, dst_stride,
2618                       kernel[x_q4 & 0xf], 16 * src_w / dst_w,
2619                       kernel[y_q4 & 0xf], 16 * src_h / dst_h,
2620                       16 / factor, 16 / factor);
2621 #endif  // CONFIG_VP9_HIGHBITDEPTH
2622       }
2623     }
2624   }
2625 
2626   vpx_extend_frame_borders(dst);
2627 }
2628 
2629 static int scale_down(VP9_COMP *cpi, int q) {
2630   RATE_CONTROL *const rc = &cpi->rc;
2631   GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2632   int scale = 0;
2633   assert(frame_is_kf_gf_arf(cpi));
2634 
2635   if (rc->frame_size_selector == UNSCALED &&
2636       q >= rc->rf_level_maxq[gf_group->rf_level[gf_group->index]]) {
2637     const int max_size_thresh = (int)(rate_thresh_mult[SCALE_STEP1]
2638         * VPXMAX(rc->this_frame_target, rc->avg_frame_bandwidth));
2639     scale = rc->projected_frame_size > max_size_thresh ? 1 : 0;
2640   }
2641   return scale;
2642 }
2643 
2644 // Function to test for conditions that indicate we should loop
2645 // back and recode a frame.
2646 static int recode_loop_test(VP9_COMP *cpi,
2647                             int high_limit, int low_limit,
2648                             int q, int maxq, int minq) {
2649   const RATE_CONTROL *const rc = &cpi->rc;
2650   const VP9EncoderConfig *const oxcf = &cpi->oxcf;
2651   const int frame_is_kfgfarf = frame_is_kf_gf_arf(cpi);
2652   int force_recode = 0;
2653 
2654   if ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
2655       (cpi->sf.recode_loop == ALLOW_RECODE) ||
2656       (frame_is_kfgfarf &&
2657        (cpi->sf.recode_loop == ALLOW_RECODE_KFARFGF))) {
2658     if (frame_is_kfgfarf &&
2659         (oxcf->resize_mode == RESIZE_DYNAMIC) &&
2660         scale_down(cpi, q)) {
2661         // Code this group at a lower resolution.
2662         cpi->resize_pending = 1;
2663         return 1;
2664     }
2665 
2666     // TODO(agrange) high_limit could be greater than the scale-down threshold.
2667     if ((rc->projected_frame_size > high_limit && q < maxq) ||
2668         (rc->projected_frame_size < low_limit && q > minq)) {
2669       force_recode = 1;
2670     } else if (cpi->oxcf.rc_mode == VPX_CQ) {
2671       // Deal with frame undershoot and whether or not we are
2672       // below the automatically set cq level.
2673       if (q > oxcf->cq_level &&
2674           rc->projected_frame_size < ((rc->this_frame_target * 7) >> 3)) {
2675         force_recode = 1;
2676       }
2677     }
2678   }
2679   return force_recode;
2680 }
2681 
2682 void vp9_update_reference_frames(VP9_COMP *cpi) {
2683   VP9_COMMON * const cm = &cpi->common;
2684   BufferPool *const pool = cm->buffer_pool;
2685 
2686   // At this point the new frame has been encoded.
2687   // If any buffer copy / swapping is signaled it should be done here.
2688   if (cm->frame_type == KEY_FRAME) {
2689     ref_cnt_fb(pool->frame_bufs,
2690                &cm->ref_frame_map[cpi->gld_fb_idx], cm->new_fb_idx);
2691     ref_cnt_fb(pool->frame_bufs,
2692                &cm->ref_frame_map[cpi->alt_fb_idx], cm->new_fb_idx);
2693   } else if (vp9_preserve_existing_gf(cpi)) {
2694     // We have decided to preserve the previously existing golden frame as our
2695     // new ARF frame. However, in the short term in function
2696     // vp9_bitstream.c::get_refresh_mask() we left it in the GF slot and, if
2697     // we're updating the GF with the current decoded frame, we save it to the
2698     // ARF slot instead.
2699     // We now have to update the ARF with the current frame and swap gld_fb_idx
2700     // and alt_fb_idx so that, overall, we've stored the old GF in the new ARF
2701     // slot and, if we're updating the GF, the current frame becomes the new GF.
2702     int tmp;
2703 
2704     ref_cnt_fb(pool->frame_bufs,
2705                &cm->ref_frame_map[cpi->alt_fb_idx], cm->new_fb_idx);
2706 
2707     tmp = cpi->alt_fb_idx;
2708     cpi->alt_fb_idx = cpi->gld_fb_idx;
2709     cpi->gld_fb_idx = tmp;
2710 
2711     if (is_two_pass_svc(cpi)) {
2712       cpi->svc.layer_context[0].gold_ref_idx = cpi->gld_fb_idx;
2713       cpi->svc.layer_context[0].alt_ref_idx = cpi->alt_fb_idx;
2714     }
2715   } else { /* For non key/golden frames */
2716     if (cpi->refresh_alt_ref_frame) {
2717       int arf_idx = cpi->alt_fb_idx;
2718       if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
2719         const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2720         arf_idx = gf_group->arf_update_idx[gf_group->index];
2721       }
2722 
2723       ref_cnt_fb(pool->frame_bufs,
2724                  &cm->ref_frame_map[arf_idx], cm->new_fb_idx);
2725       memcpy(cpi->interp_filter_selected[ALTREF_FRAME],
2726              cpi->interp_filter_selected[0],
2727              sizeof(cpi->interp_filter_selected[0]));
2728     }
2729 
2730     if (cpi->refresh_golden_frame) {
2731       ref_cnt_fb(pool->frame_bufs,
2732                  &cm->ref_frame_map[cpi->gld_fb_idx], cm->new_fb_idx);
2733       if (!cpi->rc.is_src_frame_alt_ref)
2734         memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
2735                cpi->interp_filter_selected[0],
2736                sizeof(cpi->interp_filter_selected[0]));
2737       else
2738         memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
2739                cpi->interp_filter_selected[ALTREF_FRAME],
2740                sizeof(cpi->interp_filter_selected[ALTREF_FRAME]));
2741     }
2742   }
2743 
2744   if (cpi->refresh_last_frame) {
2745     ref_cnt_fb(pool->frame_bufs,
2746                &cm->ref_frame_map[cpi->lst_fb_idx], cm->new_fb_idx);
2747     if (!cpi->rc.is_src_frame_alt_ref)
2748       memcpy(cpi->interp_filter_selected[LAST_FRAME],
2749              cpi->interp_filter_selected[0],
2750              sizeof(cpi->interp_filter_selected[0]));
2751   }
2752 #if CONFIG_VP9_TEMPORAL_DENOISING
2753   if (cpi->oxcf.noise_sensitivity > 0) {
2754     vp9_denoiser_update_frame_info(&cpi->denoiser,
2755                                    *cpi->Source,
2756                                    cpi->common.frame_type,
2757                                    cpi->refresh_alt_ref_frame,
2758                                    cpi->refresh_golden_frame,
2759                                    cpi->refresh_last_frame,
2760                                    cpi->resize_pending);
2761   }
2762 #endif
2763 }
2764 
2765 static void loopfilter_frame(VP9_COMP *cpi, VP9_COMMON *cm) {
2766   MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
2767   struct loopfilter *lf = &cm->lf;
2768 
2769   if (xd->lossless) {
2770     lf->filter_level = 0;
2771   } else {
2772     struct vpx_usec_timer timer;
2773 
2774     vpx_clear_system_state();
2775 
2776     vpx_usec_timer_start(&timer);
2777 
2778     vp9_pick_filter_level(cpi->Source, cpi, cpi->sf.lpf_pick);
2779 
2780     vpx_usec_timer_mark(&timer);
2781     cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
2782   }
2783 
2784   if (lf->filter_level > 0) {
2785     vp9_build_mask_frame(cm, lf->filter_level, 0);
2786 
2787     if (cpi->num_workers > 1)
2788       vp9_loop_filter_frame_mt(cm->frame_to_show, cm, xd->plane,
2789                                lf->filter_level, 0, 0,
2790                                cpi->workers, cpi->num_workers,
2791                                &cpi->lf_row_sync);
2792     else
2793       vp9_loop_filter_frame(cm->frame_to_show, cm, xd, lf->filter_level, 0, 0);
2794   }
2795 
2796   vpx_extend_frame_inner_borders(cm->frame_to_show);
2797 }
2798 
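/* (Re)allocate the per-frame motion vector buffer whenever the target frame
 * buffer needs more mi_rows * mi_cols MV_REF entries than it currently has. */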
2799 static INLINE void alloc_frame_mvs(const VP9_COMMON *cm,
2800                                    int buffer_idx) {
2801   RefCntBuffer *const new_fb_ptr = &cm->buffer_pool->frame_bufs[buffer_idx];
2802   if (new_fb_ptr->mvs == NULL ||
2803       new_fb_ptr->mi_rows < cm->mi_rows ||
2804       new_fb_ptr->mi_cols < cm->mi_cols) {
2805     vpx_free(new_fb_ptr->mvs);
2806     new_fb_ptr->mvs =
2807       (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
2808                            sizeof(*new_fb_ptr->mvs));
2809     new_fb_ptr->mi_rows = cm->mi_rows;
2810     new_fb_ptr->mi_cols = cm->mi_cols;
2811   }
2812 }
2813 
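// For each reference frame in use, produces a version scaled to the current
// coded frame size if the dimensions differ (reusing or allocating a scaled
// buffer as needed); otherwise the existing buffer is referenced directly.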
2814 void vp9_scale_references(VP9_COMP *cpi) {
2815   VP9_COMMON *cm = &cpi->common;
2816   MV_REFERENCE_FRAME ref_frame;
2817   const VP9_REFFRAME ref_mask[3] = {VP9_LAST_FLAG, VP9_GOLD_FLAG, VP9_ALT_FLAG};
2818 
2819   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
2820     // Need to convert from VP9_REFFRAME to index into ref_mask (subtract 1).
2821     if (cpi->ref_frame_flags & ref_mask[ref_frame - 1]) {
2822       BufferPool *const pool = cm->buffer_pool;
2823       const YV12_BUFFER_CONFIG *const ref = get_ref_frame_buffer(cpi,
2824                                                                  ref_frame);
2825 
2826       if (ref == NULL) {
2827         cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
2828         continue;
2829       }
2830 
2831 #if CONFIG_VP9_HIGHBITDEPTH
2832       if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
2833         RefCntBuffer *new_fb_ptr = NULL;
2834         int force_scaling = 0;
2835         int new_fb = cpi->scaled_ref_idx[ref_frame - 1];
2836         if (new_fb == INVALID_IDX) {
2837           new_fb = get_free_fb(cm);
2838           force_scaling = 1;
2839         }
2840         if (new_fb == INVALID_IDX)
2841           return;
2842         new_fb_ptr = &pool->frame_bufs[new_fb];
2843         if (force_scaling ||
2844             new_fb_ptr->buf.y_crop_width != cm->width ||
2845             new_fb_ptr->buf.y_crop_height != cm->height) {
2846           vpx_realloc_frame_buffer(&new_fb_ptr->buf,
2847                                    cm->width, cm->height,
2848                                    cm->subsampling_x, cm->subsampling_y,
2849                                    cm->use_highbitdepth,
2850                                    VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
2851                                    NULL, NULL, NULL);
2852           scale_and_extend_frame(ref, &new_fb_ptr->buf, (int)cm->bit_depth);
2853           cpi->scaled_ref_idx[ref_frame - 1] = new_fb;
2854           alloc_frame_mvs(cm, new_fb);
2855         }
2856 #else
2857       if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
2858         RefCntBuffer *new_fb_ptr = NULL;
2859         int force_scaling = 0;
2860         int new_fb = cpi->scaled_ref_idx[ref_frame - 1];
2861         if (new_fb == INVALID_IDX) {
2862           new_fb = get_free_fb(cm);
2863           force_scaling = 1;
2864         }
2865         if (new_fb == INVALID_IDX)
2866           return;
2867         new_fb_ptr = &pool->frame_bufs[new_fb];
2868         if (force_scaling ||
2869             new_fb_ptr->buf.y_crop_width != cm->width ||
2870             new_fb_ptr->buf.y_crop_height != cm->height) {
2871           vpx_realloc_frame_buffer(&new_fb_ptr->buf,
2872                                    cm->width, cm->height,
2873                                    cm->subsampling_x, cm->subsampling_y,
2874                                    VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
2875                                    NULL, NULL, NULL);
2876           scale_and_extend_frame(ref, &new_fb_ptr->buf);
2877           cpi->scaled_ref_idx[ref_frame - 1] = new_fb;
2878           alloc_frame_mvs(cm, new_fb);
2879         }
2880 #endif  // CONFIG_VP9_HIGHBITDEPTH
2881       } else {
2882         const int buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
2883         RefCntBuffer *const buf = &pool->frame_bufs[buf_idx];
2884         buf->buf.y_crop_width = ref->y_crop_width;
2885         buf->buf.y_crop_height = ref->y_crop_height;
2886         cpi->scaled_ref_idx[ref_frame - 1] = buf_idx;
2887         ++buf->ref_count;
2888       }
2889     } else {
2890       if (cpi->oxcf.pass != 0 || cpi->use_svc)
2891         cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
2892     }
2893   }
2894 }
2895 
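// Drops the references taken by vp9_scale_references(). For 1-pass non-SVC
// encoding, only scaled buffers whose reference is about to be refreshed or
// that already match the reference resolution are released; otherwise all
// scaled reference slots are cleared.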
2896 static void release_scaled_references(VP9_COMP *cpi) {
2897   VP9_COMMON *cm = &cpi->common;
2898   int i;
2899   if (cpi->oxcf.pass == 0 && !cpi->use_svc) {
2900     // Only release a scaled reference if the underlying reference will be
2901     // updated, or if the scaled copy has the same resolution as the frame.
2902     int refresh[3];
2903     refresh[0] = (cpi->refresh_last_frame) ? 1 : 0;
2904     refresh[1] = (cpi->refresh_golden_frame) ? 1 : 0;
2905     refresh[2] = (cpi->refresh_alt_ref_frame) ? 1 : 0;
2906     for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
2907       const int idx = cpi->scaled_ref_idx[i - 1];
2908       RefCntBuffer *const buf = idx != INVALID_IDX ?
2909           &cm->buffer_pool->frame_bufs[idx] : NULL;
2910       const YV12_BUFFER_CONFIG *const ref = get_ref_frame_buffer(cpi, i);
2911       if (buf != NULL &&
2912           (refresh[i - 1] ||
2913           (buf->buf.y_crop_width == ref->y_crop_width &&
2914            buf->buf.y_crop_height == ref->y_crop_height))) {
2915         --buf->ref_count;
2916         cpi->scaled_ref_idx[i - 1] = INVALID_IDX;
2917       }
2918     }
2919   } else {
2920     for (i = 0; i < MAX_REF_FRAMES; ++i) {
2921       const int idx = cpi->scaled_ref_idx[i];
2922       RefCntBuffer *const buf = idx != INVALID_IDX ?
2923           &cm->buffer_pool->frame_bufs[idx] : NULL;
2924       if (buf != NULL) {
2925         --buf->ref_count;
2926         cpi->scaled_ref_idx[i] = INVALID_IDX;
2927       }
2928     }
2929   }
2930 }
2931 
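// Collapses a full set of token counts into the model token set: counts for
// TWO_TOKEN and above are merged into TWO_TOKEN, and EOB_TOKEN is mapped to
// EOB_MODEL_TOKEN.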
2932 static void full_to_model_count(unsigned int *model_count,
2933                                 unsigned int *full_count) {
2934   int n;
2935   model_count[ZERO_TOKEN] = full_count[ZERO_TOKEN];
2936   model_count[ONE_TOKEN] = full_count[ONE_TOKEN];
2937   model_count[TWO_TOKEN] = full_count[TWO_TOKEN];
2938   for (n = THREE_TOKEN; n < EOB_TOKEN; ++n)
2939     model_count[TWO_TOKEN] += full_count[n];
2940   model_count[EOB_MODEL_TOKEN] = full_count[EOB_TOKEN];
2941 }
2942 
2943 static void full_to_model_counts(vp9_coeff_count_model *model_count,
2944                                  vp9_coeff_count *full_count) {
2945   int i, j, k, l;
2946 
2947   for (i = 0; i < PLANE_TYPES; ++i)
2948     for (j = 0; j < REF_TYPES; ++j)
2949       for (k = 0; k < COEF_BANDS; ++k)
2950         for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
2951           full_to_model_count(model_count[i][j][k][l], full_count[i][j][k][l]);
2952 }
2953 
2954 #if 0 && CONFIG_INTERNAL_STATS
2955 static void output_frame_level_debug_stats(VP9_COMP *cpi) {
2956   VP9_COMMON *const cm = &cpi->common;
2957   FILE *const f = fopen("tmp.stt", cm->current_video_frame ? "a" : "w");
2958   int64_t recon_err;
2959 
2960   vpx_clear_system_state();
2961 
2962   recon_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
2963 
2964   if (cpi->twopass.total_left_stats.coded_error != 0.0)
2965     fprintf(f, "%10u %dx%d %d %d %10d %10d %10d %10d"
2966        "%10"PRId64" %10"PRId64" %5d %5d %10"PRId64" "
2967        "%10"PRId64" %10"PRId64" %10d "
2968        "%7.2lf %7.2lf %7.2lf %7.2lf %7.2lf"
2969         "%6d %6d %5d %5d %5d "
2970         "%10"PRId64" %10.3lf"
2971         "%10lf %8u %10"PRId64" %10d %10d %10d\n",
2972         cpi->common.current_video_frame,
2973         cm->width, cm->height,
2974         cpi->rc.source_alt_ref_pending,
2975         cpi->rc.source_alt_ref_active,
2976         cpi->rc.this_frame_target,
2977         cpi->rc.projected_frame_size,
2978         cpi->rc.projected_frame_size / cpi->common.MBs,
2979         (cpi->rc.projected_frame_size - cpi->rc.this_frame_target),
2980         cpi->rc.vbr_bits_off_target,
2981         cpi->rc.vbr_bits_off_target_fast,
2982         cpi->twopass.extend_minq,
2983         cpi->twopass.extend_minq_fast,
2984         cpi->rc.total_target_vs_actual,
2985         (cpi->rc.starting_buffer_level - cpi->rc.bits_off_target),
2986         cpi->rc.total_actual_bits, cm->base_qindex,
2987         vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth),
2988         (double)vp9_dc_quant(cm->base_qindex, 0, cm->bit_depth) / 4.0,
2989         vp9_convert_qindex_to_q(cpi->twopass.active_worst_quality,
2990                                 cm->bit_depth),
2991         cpi->rc.avg_q,
2992         vp9_convert_qindex_to_q(cpi->oxcf.cq_level, cm->bit_depth),
2993         cpi->refresh_last_frame, cpi->refresh_golden_frame,
2994         cpi->refresh_alt_ref_frame, cm->frame_type, cpi->rc.gfu_boost,
2995         cpi->twopass.bits_left,
2996         cpi->twopass.total_left_stats.coded_error,
2997         cpi->twopass.bits_left /
2998             (1 + cpi->twopass.total_left_stats.coded_error),
2999         cpi->tot_recode_hits, recon_err, cpi->rc.kf_boost,
3000         cpi->twopass.kf_zeromotion_pct,
3001         cpi->twopass.fr_content_type);
3002 
3003   fclose(f);
3004 
3005   if (0) {
3006     FILE *const fmodes = fopen("Modes.stt", "a");
3007     int i;
3008 
3009     fprintf(fmodes, "%6d:%1d:%1d:%1d ", cpi->common.current_video_frame,
3010             cm->frame_type, cpi->refresh_golden_frame,
3011             cpi->refresh_alt_ref_frame);
3012 
3013     for (i = 0; i < MAX_MODES; ++i)
3014       fprintf(fmodes, "%5d ", cpi->mode_chosen_counts[i]);
3015 
3016     fprintf(fmodes, "\n");
3017 
3018     fclose(fmodes);
3019   }
3020 }
3021 #endif
3022 
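// Sets the starting motion search range from the frame dimensions and, when
// auto step sizing is enabled, lets the search range track twice the largest
// motion vector magnitude seen in the previous shown frame.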
3023 static void set_mv_search_params(VP9_COMP *cpi) {
3024   const VP9_COMMON *const cm = &cpi->common;
3025   const unsigned int max_mv_def = VPXMIN(cm->width, cm->height);
3026 
3027   // Default based on max resolution.
3028   cpi->mv_step_param = vp9_init_search_range(max_mv_def);
3029 
3030   if (cpi->sf.mv.auto_mv_step_size) {
3031     if (frame_is_intra_only(cm)) {
3032       // Initialize max_mv_magnitude for use in the first INTER frame
3033       // after a key/intra-only frame.
3034       cpi->max_mv_magnitude = max_mv_def;
3035     } else {
3036       if (cm->show_frame) {
3037         // Allow mv_steps to correspond to twice the max mv magnitude found
3038         // in the previous frame, capped by the default max_mv_magnitude based
3039         // on resolution.
3040         cpi->mv_step_param = vp9_init_search_range(
3041             VPXMIN(max_mv_def, 2 * cpi->max_mv_magnitude));
3042       }
3043       cpi->max_mv_magnitude = 0;
3044     }
3045   }
3046 }
3047 
3048 static void set_size_independent_vars(VP9_COMP *cpi) {
3049   vp9_set_speed_features_framesize_independent(cpi);
3050   vp9_set_rd_speed_thresholds(cpi);
3051   vp9_set_rd_speed_thresholds_sub8x8(cpi);
3052   cpi->common.interp_filter = cpi->sf.default_interp_filter;
3053 }
3054 
3055 static void set_size_dependent_vars(VP9_COMP *cpi, int *q,
3056                                     int *bottom_index, int *top_index) {
3057   VP9_COMMON *const cm = &cpi->common;
3058   const VP9EncoderConfig *const oxcf = &cpi->oxcf;
3059 
3060   // Setup variables that depend on the dimensions of the frame.
3061   vp9_set_speed_features_framesize_dependent(cpi);
3062 
3063   // Decide q and q bounds.
3064   *q = vp9_rc_pick_q_and_bounds(cpi, bottom_index, top_index);
3065 
3066   if (!frame_is_intra_only(cm)) {
3067     vp9_set_high_precision_mv(cpi, (*q) < HIGH_PRECISION_MV_QTHRESH);
3068   }
3069 
3070   // Configure experimental use of segmentation for enhanced coding of
3071   // static regions if indicated.
3072   // Only allowed in the second pass of a two pass encode, as it requires
3073   // lagged coding, and if the relevant speed feature flag is set.
3074   if (oxcf->pass == 2 && cpi->sf.static_segmentation)
3075     configure_static_seg_features(cpi);
3076 
3077 #if CONFIG_VP9_POSTPROC
3078   if (oxcf->noise_sensitivity > 0) {
3079     int l = 0;
3080     switch (oxcf->noise_sensitivity) {
3081       case 1:
3082         l = 20;
3083         break;
3084       case 2:
3085         l = 40;
3086         break;
3087       case 3:
3088         l = 60;
3089         break;
3090       case 4:
3091       case 5:
3092         l = 100;
3093         break;
3094       case 6:
3095         l = 150;
3096         break;
3097     }
3098     vp9_denoise(cpi->Source, cpi->Source, l);
3099   }
3100 #endif  // CONFIG_VP9_POSTPROC
3101 }
3102 
3103 #if CONFIG_VP9_TEMPORAL_DENOISING
3104 static void setup_denoiser_buffer(VP9_COMP *cpi) {
3105   VP9_COMMON *const cm = &cpi->common;
3106   if (cpi->oxcf.noise_sensitivity > 0 &&
3107       !cpi->denoiser.frame_buffer_initialized) {
3108     vp9_denoiser_alloc(&(cpi->denoiser), cm->width, cm->height,
3109                        cm->subsampling_x, cm->subsampling_y,
3110 #if CONFIG_VP9_HIGHBITDEPTH
3111                        cm->use_highbitdepth,
3112 #endif
3113                        VP9_ENC_BORDER_IN_PIXELS);
3114   }
3115 }
3116 #endif
3117 
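// Initializes the step search configuration for the selected motion search
// method using the stride of the scaled source buffer.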
3118 static void init_motion_estimation(VP9_COMP *cpi) {
3119   int y_stride = cpi->scaled_source.y_stride;
3120 
3121   if (cpi->sf.mv.search_method == NSTEP) {
3122     vp9_init3smotion_compensation(&cpi->ss_cfg, y_stride);
3123   } else if (cpi->sf.mv.search_method == DIAMOND) {
3124     vp9_init_dsmotion_compensation(&cpi->ss_cfg, y_stride);
3125   }
3126 }
3127 
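// Determines the coded frame size for this frame (handling 2-pass VBR and
// 1-pass CBR dynamic resizing), reallocates the frame buffers accordingly,
// and sets up the scale factors for each active reference frame.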
3128 static void set_frame_size(VP9_COMP *cpi) {
3129   int ref_frame;
3130   VP9_COMMON *const cm = &cpi->common;
3131   VP9EncoderConfig *const oxcf = &cpi->oxcf;
3132   MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3133 
3134   if (oxcf->pass == 2 &&
3135       oxcf->rc_mode == VPX_VBR &&
3136       ((oxcf->resize_mode == RESIZE_FIXED && cm->current_video_frame == 0) ||
3137         (oxcf->resize_mode == RESIZE_DYNAMIC && cpi->resize_pending))) {
3138     calculate_coded_size(
3139         cpi, &oxcf->scaled_frame_width, &oxcf->scaled_frame_height);
3140 
3141     // There has been a change in frame size.
3142     vp9_set_size_literal(cpi, oxcf->scaled_frame_width,
3143                          oxcf->scaled_frame_height);
3144   }
3145 
3146   if (oxcf->pass == 0 &&
3147       oxcf->rc_mode == VPX_CBR &&
3148       !cpi->use_svc &&
3149       oxcf->resize_mode == RESIZE_DYNAMIC &&
3150       cpi->resize_pending != 0) {
3151     oxcf->scaled_frame_width =
3152         (oxcf->width * cpi->resize_scale_num) / cpi->resize_scale_den;
3153     oxcf->scaled_frame_height =
3154         (oxcf->height * cpi->resize_scale_num) / cpi->resize_scale_den;
3155     // There has been a change in frame size.
3156     vp9_set_size_literal(cpi,
3157                          oxcf->scaled_frame_width,
3158                          oxcf->scaled_frame_height);
3159 
3160     // TODO(agrange) Scale cpi->max_mv_magnitude if frame-size has changed.
3161     set_mv_search_params(cpi);
3162 
3163 #if CONFIG_VP9_TEMPORAL_DENOISING
3164     // Reset the denoiser on the resized frame.
3165     if (cpi->oxcf.noise_sensitivity > 0) {
3166       vp9_denoiser_free(&(cpi->denoiser));
3167       setup_denoiser_buffer(cpi);
3168       // Dynamic resize is only triggered for non-SVC, so we can force a
3169       // golden frame update here as a temporary fix for the denoiser.
3170       cpi->refresh_golden_frame = 1;
3171     }
3172 #endif
3173   }
3174 
3175   if ((oxcf->pass == 2) &&
3176       (!cpi->use_svc ||
3177           (is_two_pass_svc(cpi) &&
3178               cpi->svc.encode_empty_frame_state != ENCODING))) {
3179     vp9_set_target_rate(cpi);
3180   }
3181 
3182   alloc_frame_mvs(cm, cm->new_fb_idx);
3183 
3184   // Reset the frame pointers to the current frame size.
3185   vpx_realloc_frame_buffer(get_frame_new_buffer(cm),
3186                            cm->width, cm->height,
3187                            cm->subsampling_x, cm->subsampling_y,
3188 #if CONFIG_VP9_HIGHBITDEPTH
3189                            cm->use_highbitdepth,
3190 #endif
3191                            VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
3192                            NULL, NULL, NULL);
3193 
3194   alloc_util_frame_buffers(cpi);
3195   init_motion_estimation(cpi);
3196 
3197   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3198     RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - 1];
3199     const int buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
3200 
3201     ref_buf->idx = buf_idx;
3202 
3203     if (buf_idx != INVALID_IDX) {
3204       YV12_BUFFER_CONFIG *const buf = &cm->buffer_pool->frame_bufs[buf_idx].buf;
3205       ref_buf->buf = buf;
3206 #if CONFIG_VP9_HIGHBITDEPTH
3207       vp9_setup_scale_factors_for_frame(&ref_buf->sf,
3208                                         buf->y_crop_width, buf->y_crop_height,
3209                                         cm->width, cm->height,
3210                                         (buf->flags & YV12_FLAG_HIGHBITDEPTH) ?
3211                                             1 : 0);
3212 #else
3213       vp9_setup_scale_factors_for_frame(&ref_buf->sf,
3214                                         buf->y_crop_width, buf->y_crop_height,
3215                                         cm->width, cm->height);
3216 #endif  // CONFIG_VP9_HIGHBITDEPTH
3217       if (vp9_is_scaled(&ref_buf->sf))
3218         vpx_extend_frame_borders(buf);
3219     } else {
3220       ref_buf->buf = NULL;
3221     }
3222   }
3223 
3224   set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
3225 }
3226 
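// Single-pass frame encode without the recode loop: scales the source if
// needed, picks q, runs the selected AQ mode setup, and encodes the frame
// once. For 1-pass CBR screen content, a trial bitstream pack may trigger a
// single re-encode at a higher q when large overshoot is detected.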
3227 static void encode_without_recode_loop(VP9_COMP *cpi,
3228                                        size_t *size,
3229                                        uint8_t *dest) {
3230   VP9_COMMON *const cm = &cpi->common;
3231   int q = 0, bottom_index = 0, top_index = 0;  // Dummy variables.
3232 
3233   vpx_clear_system_state();
3234 
3235   set_frame_size(cpi);
3236 
3237   cpi->Source = vp9_scale_if_required(cm,
3238                                       cpi->un_scaled_source,
3239                                       &cpi->scaled_source,
3240                                       (cpi->oxcf.pass == 0));
3241 
3242   // Avoid scaling last_source unless it's needed.
3243   // Last source is currently only used for screen-content mode,
3244   // or if partition_search_type == SOURCE_VAR_BASED_PARTITION.
3245   if (cpi->unscaled_last_source != NULL &&
3246       (cpi->oxcf.content == VP9E_CONTENT_SCREEN ||
3247       cpi->sf.partition_search_type == SOURCE_VAR_BASED_PARTITION))
3248     cpi->Last_Source = vp9_scale_if_required(cm,
3249                                              cpi->unscaled_last_source,
3250                                              &cpi->scaled_last_source,
3251                                              (cpi->oxcf.pass == 0));
3252 
3253 #if CONFIG_VP9_TEMPORAL_DENOISING
3254   if (cpi->oxcf.noise_sensitivity > 0 &&
3255       cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
3256     vp9_denoiser_update_noise_estimate(cpi);
3257   }
3258 #endif
3259 
3260   if (cpi->oxcf.pass == 0 &&
3261       cpi->oxcf.rc_mode == VPX_CBR &&
3262       cpi->resize_state == 0 &&
3263       cm->frame_type != KEY_FRAME &&
3264       cpi->oxcf.content == VP9E_CONTENT_SCREEN)
3265     vp9_avg_source_sad(cpi);
3266 
3267   if (frame_is_intra_only(cm) == 0) {
3268     vp9_scale_references(cpi);
3269   }
3270 
3271   set_size_independent_vars(cpi);
3272   set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
3273 
3274   vp9_set_quantizer(cm, q);
3275   vp9_set_variance_partition_thresholds(cpi, q);
3276 
3277   setup_frame(cpi);
3278 
3279   suppress_active_map(cpi);
3280   // Variance adaptive and in frame q adjustment experiments are mutually
3281   // exclusive.
3282   if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
3283     vp9_vaq_frame_setup(cpi);
3284   } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
3285     vp9_setup_in_frame_q_adj(cpi);
3286   } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
3287     vp9_cyclic_refresh_setup(cpi);
3288   }
3289   apply_active_map(cpi);
3290 
3291   // transform / motion compensation build reconstruction frame
3292   vp9_encode_frame(cpi);
3293 
3294   // Check if we should drop this frame because of high overshoot.
3295   // Only for frames where high temporal-source sad is detected.
3296   if (cpi->oxcf.pass == 0 &&
3297       cpi->oxcf.rc_mode == VPX_CBR &&
3298       cpi->resize_state == 0 &&
3299       cm->frame_type != KEY_FRAME &&
3300       cpi->oxcf.content == VP9E_CONTENT_SCREEN &&
3301       cpi->rc.high_source_sad == 1) {
3302     int frame_size = 0;
3303     // Get an estimate of the encoded frame size.
3304     save_coding_context(cpi);
3305     vp9_pack_bitstream(cpi, dest, size);
3306     restore_coding_context(cpi);
3307     frame_size = (int)(*size) << 3;
3308     // Check if encoded frame will overshoot too much, and if so, set the q and
3309     // adjust some rate control parameters, and return to re-encode the frame.
3310     if (vp9_encodedframe_overshoot(cpi, frame_size, &q)) {
3311       vpx_clear_system_state();
3312       vp9_set_quantizer(cm, q);
3313       vp9_set_variance_partition_thresholds(cpi, q);
3314       suppress_active_map(cpi);
3315       // Turn-off cyclic refresh for re-encoded frame.
3316       if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
3317         unsigned char *const seg_map = cpi->segmentation_map;
3318         memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
3319         vp9_disable_segmentation(&cm->seg);
3320       }
3321       apply_active_map(cpi);
3322       vp9_encode_frame(cpi);
3323     }
3324   }
3325 
3326   // Update some stats from cyclic refresh, and check whether the golden
3327   // reference update should be skipped, for non-SVC 1 pass CBR.
3328   if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ &&
3329       cm->frame_type != KEY_FRAME &&
3330       !cpi->use_svc &&
3331       cpi->ext_refresh_frame_flags_pending == 0 &&
3332       (cpi->oxcf.pass == 0 && cpi->oxcf.rc_mode == VPX_CBR))
3333     vp9_cyclic_refresh_check_golden_update(cpi);
3334 
3335   // Update the skip mb flag probabilities based on the distribution
3336   // seen in the last encoder iteration.
3337   // update_base_skip_probs(cpi);
3338   vpx_clear_system_state();
3339 }
3340 
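// Frame encode with the recode loop: encodes the frame, estimates its size
// from a trial bitstream pack and, when the result falls outside the computed
// size limits (or a forced key frame misses its quality target), adjusts q
// and re-encodes until the constraints are met or q can no longer move.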
3341 static void encode_with_recode_loop(VP9_COMP *cpi,
3342                                     size_t *size,
3343                                     uint8_t *dest) {
3344   VP9_COMMON *const cm = &cpi->common;
3345   RATE_CONTROL *const rc = &cpi->rc;
3346   int bottom_index, top_index;
3347   int loop_count = 0;
3348   int loop_at_this_size = 0;
3349   int loop = 0;
3350   int overshoot_seen = 0;
3351   int undershoot_seen = 0;
3352   int frame_over_shoot_limit;
3353   int frame_under_shoot_limit;
3354   int q = 0, q_low = 0, q_high = 0;
3355 
3356   set_size_independent_vars(cpi);
3357 
3358   do {
3359     vpx_clear_system_state();
3360 
3361     set_frame_size(cpi);
3362 
3363     if (loop_count == 0 || cpi->resize_pending != 0) {
3364       set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
3365 
3366       // TODO(agrange) Scale cpi->max_mv_magnitude if frame-size has changed.
3367       set_mv_search_params(cpi);
3368 
3369       // Reset the loop state for new frame size.
3370       overshoot_seen = 0;
3371       undershoot_seen = 0;
3372 
3373       // Reconfiguration for change in frame size has concluded.
3374       cpi->resize_pending = 0;
3375 
3376       q_low = bottom_index;
3377       q_high = top_index;
3378 
3379       loop_at_this_size = 0;
3380     }
3381 
3382     // Decide frame size bounds first time through.
3383     if (loop_count == 0) {
3384       vp9_rc_compute_frame_size_bounds(cpi, rc->this_frame_target,
3385                                        &frame_under_shoot_limit,
3386                                        &frame_over_shoot_limit);
3387     }
3388 
3389     cpi->Source = vp9_scale_if_required(cm, cpi->un_scaled_source,
3390                                       &cpi->scaled_source,
3391                                       (cpi->oxcf.pass == 0));
3392 
3393     if (cpi->unscaled_last_source != NULL)
3394       cpi->Last_Source = vp9_scale_if_required(cm, cpi->unscaled_last_source,
3395                                                &cpi->scaled_last_source,
3396                                                (cpi->oxcf.pass == 0));
3397 
3398     if (frame_is_intra_only(cm) == 0) {
3399       if (loop_count > 0) {
3400         release_scaled_references(cpi);
3401       }
3402       vp9_scale_references(cpi);
3403     }
3404 
3405     vp9_set_quantizer(cm, q);
3406 
3407     if (loop_count == 0)
3408       setup_frame(cpi);
3409 
3410     // Variance adaptive and in frame q adjustment experiments are mutually
3411     // exclusive.
3412     if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
3413       vp9_vaq_frame_setup(cpi);
3414     } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
3415       vp9_setup_in_frame_q_adj(cpi);
3416     }
3417 
3418     // transform / motion compensation build reconstruction frame
3419     vp9_encode_frame(cpi);
3420 
3421     // Update the skip mb flag probabilities based on the distribution
3422     // seen in the last encoder iteration.
3423     // update_base_skip_probs(cpi);
3424 
3425     vpx_clear_system_state();
3426 
3427     // Dummy pack of the bitstream using up to date stats to get an
3428     // accurate estimate of output frame size to determine if we need
3429     // to recode.
3430     if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF) {
3431       save_coding_context(cpi);
3432       if (!cpi->sf.use_nonrd_pick_mode)
3433         vp9_pack_bitstream(cpi, dest, size);
3434 
3435       rc->projected_frame_size = (int)(*size) << 3;
3436       restore_coding_context(cpi);
3437 
3438       if (frame_over_shoot_limit == 0)
3439         frame_over_shoot_limit = 1;
3440     }
3441 
3442     if (cpi->oxcf.rc_mode == VPX_Q) {
3443       loop = 0;
3444     } else {
3445       if ((cm->frame_type == KEY_FRAME) &&
3446            rc->this_key_frame_forced &&
3447            (rc->projected_frame_size < rc->max_frame_bandwidth)) {
3448         int last_q = q;
3449         int64_t kf_err;
3450 
3451         int64_t high_err_target = cpi->ambient_err;
3452         int64_t low_err_target = cpi->ambient_err >> 1;
3453 
3454 #if CONFIG_VP9_HIGHBITDEPTH
3455         if (cm->use_highbitdepth) {
3456           kf_err = vp9_highbd_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3457         } else {
3458           kf_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3459         }
3460 #else
3461         kf_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3462 #endif  // CONFIG_VP9_HIGHBITDEPTH
3463 
3464         // Prevent possible divide by zero error below for perfect KF
3465         kf_err += !kf_err;
3466 
3467         // The key frame is not good enough or we can afford
3468         // to make it better without undue risk of popping.
3469         if ((kf_err > high_err_target &&
3470              rc->projected_frame_size <= frame_over_shoot_limit) ||
3471             (kf_err > low_err_target &&
3472              rc->projected_frame_size <= frame_under_shoot_limit)) {
3473           // Lower q_high
3474           q_high = q > q_low ? q - 1 : q_low;
3475 
3476           // Adjust Q
3477           q = (int)((q * high_err_target) / kf_err);
3478           q = VPXMIN(q, (q_high + q_low) >> 1);
3479         } else if (kf_err < low_err_target &&
3480                    rc->projected_frame_size >= frame_under_shoot_limit) {
3481           // The key frame is much better than the previous frame
3482           // Raise q_low
3483           q_low = q < q_high ? q + 1 : q_high;
3484 
3485           // Adjust Q
3486           q = (int)((q * low_err_target) / kf_err);
3487           q = VPXMIN(q, (q_high + q_low + 1) >> 1);
3488         }
3489 
3490         // Clamp Q to upper and lower limits:
3491         q = clamp(q, q_low, q_high);
3492 
3493         loop = q != last_q;
3494       } else if (recode_loop_test(
3495           cpi, frame_over_shoot_limit, frame_under_shoot_limit,
3496           q, VPXMAX(q_high, top_index), bottom_index)) {
3497         // Is the projected frame size out of range and are we allowed
3498         // to attempt to recode.
3499         int last_q = q;
3500         int retries = 0;
3501 
3502         if (cpi->resize_pending == 1) {
3503           // Change in frame size so go back around the recode loop.
3504           cpi->rc.frame_size_selector =
3505               SCALE_STEP1 - cpi->rc.frame_size_selector;
3506           cpi->rc.next_frame_size_selector = cpi->rc.frame_size_selector;
3507 
3508 #if CONFIG_INTERNAL_STATS
3509           ++cpi->tot_recode_hits;
3510 #endif
3511           ++loop_count;
3512           loop = 1;
3513           continue;
3514         }
3515 
3516         // Frame size out of permitted range:
3517         // Update correction factor & compute new Q to try...
3518 
3519         // Frame is too large
3520         if (rc->projected_frame_size > rc->this_frame_target) {
3521           // Special case if the projected size is > the max allowed.
3522           if (rc->projected_frame_size >= rc->max_frame_bandwidth)
3523             q_high = rc->worst_quality;
3524 
3525           // Raise Qlow so it is at least the current Q value.
3526           q_low = q < q_high ? q + 1 : q_high;
3527 
3528           if (undershoot_seen || loop_at_this_size > 1) {
3529             // Update rate correction factors.
3530             vp9_rc_update_rate_correction_factors(cpi);
3531 
3532             q = (q_high + q_low + 1) / 2;
3533           } else {
3534             // Update rate correction factors.
3535             vp9_rc_update_rate_correction_factors(cpi);
3536 
3537             q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
3538                                   bottom_index, VPXMAX(q_high, top_index));
3539 
3540             while (q < q_low && retries < 10) {
3541               vp9_rc_update_rate_correction_factors(cpi);
3542               q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
3543                                     bottom_index, VPXMAX(q_high, top_index));
3544               retries++;
3545             }
3546           }
3547 
3548           overshoot_seen = 1;
3549         } else {
3550           // Frame is too small
3551           q_high = q > q_low ? q - 1 : q_low;
3552 
3553           if (overshoot_seen || loop_at_this_size > 1) {
3554             vp9_rc_update_rate_correction_factors(cpi);
3555             q = (q_high + q_low) / 2;
3556           } else {
3557             vp9_rc_update_rate_correction_factors(cpi);
3558             q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
3559                                    bottom_index, top_index);
3560             // Special case reset for qlow for constrained quality.
3561             // This should only trigger where there is very substantial
3562             // undershoot on a frame and the auto cq level is above
3563             // the user-passed-in value.
3564             if (cpi->oxcf.rc_mode == VPX_CQ &&
3565                 q < q_low) {
3566               q_low = q;
3567             }
3568 
3569             while (q > q_high && retries < 10) {
3570               vp9_rc_update_rate_correction_factors(cpi);
3571               q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
3572                                      bottom_index, top_index);
3573               retries++;
3574             }
3575           }
3576 
3577           undershoot_seen = 1;
3578         }
3579 
3580         // Clamp Q to upper and lower limits:
3581         q = clamp(q, q_low, q_high);
3582 
3583         loop = (q != last_q);
3584       } else {
3585         loop = 0;
3586       }
3587     }
3588 
3589     // Special case for overlay frame.
3590     if (rc->is_src_frame_alt_ref &&
3591         rc->projected_frame_size < rc->max_frame_bandwidth)
3592       loop = 0;
3593 
3594     if (loop) {
3595       ++loop_count;
3596       ++loop_at_this_size;
3597 
3598 #if CONFIG_INTERNAL_STATS
3599       ++cpi->tot_recode_hits;
3600 #endif
3601     }
3602   } while (loop);
3603 }
3604 
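// Builds the set of reference frame flags available to the current frame,
// dropping GOLDEN or ALTREF when their buffers duplicate another reference.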
3605 static int get_ref_frame_flags(const VP9_COMP *cpi) {
3606   const int *const map = cpi->common.ref_frame_map;
3607   const int gold_is_last = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idx];
3608   const int alt_is_last = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idx];
3609   const int gold_is_alt = map[cpi->gld_fb_idx] == map[cpi->alt_fb_idx];
3610   int flags = VP9_ALT_FLAG | VP9_GOLD_FLAG | VP9_LAST_FLAG;
3611 
3612   if (gold_is_last)
3613     flags &= ~VP9_GOLD_FLAG;
3614 
3615   if (cpi->rc.frames_till_gf_update_due == INT_MAX &&
3616       (cpi->svc.number_temporal_layers == 1 &&
3617        cpi->svc.number_spatial_layers == 1))
3618     flags &= ~VP9_GOLD_FLAG;
3619 
3620   if (alt_is_last)
3621     flags &= ~VP9_ALT_FLAG;
3622 
3623   if (gold_is_alt)
3624     flags &= ~VP9_ALT_FLAG;
3625 
3626   return flags;
3627 }
3628 
3629 static void set_ext_overrides(VP9_COMP *cpi) {
3630   // Overrides the defaults with the values supplied externally via the
3631   // vp9_update_reference() and vp9_update_entropy() calls.
3632   // Note: The overrides are valid only for the next frame passed
3633   // to the encode_frame_to_data_rate() function.
3634   if (cpi->ext_refresh_frame_context_pending) {
3635     cpi->common.refresh_frame_context = cpi->ext_refresh_frame_context;
3636     cpi->ext_refresh_frame_context_pending = 0;
3637   }
3638   if (cpi->ext_refresh_frame_flags_pending) {
3639     cpi->refresh_last_frame = cpi->ext_refresh_last_frame;
3640     cpi->refresh_golden_frame = cpi->ext_refresh_golden_frame;
3641     cpi->refresh_alt_ref_frame = cpi->ext_refresh_alt_ref_frame;
3642   }
3643 }
3644 
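// Returns the unscaled buffer when its dimensions already match the coded
// frame size; otherwise scales it into |scaled| (with the normative scaler
// when requested) and returns the scaled buffer.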
3645 YV12_BUFFER_CONFIG *vp9_scale_if_required(VP9_COMMON *cm,
3646                                           YV12_BUFFER_CONFIG *unscaled,
3647                                           YV12_BUFFER_CONFIG *scaled,
3648                                           int use_normative_scaler) {
3649   if (cm->mi_cols * MI_SIZE != unscaled->y_width ||
3650       cm->mi_rows * MI_SIZE != unscaled->y_height) {
3651 #if CONFIG_VP9_HIGHBITDEPTH
3652     if (use_normative_scaler)
3653       scale_and_extend_frame(unscaled, scaled, (int)cm->bit_depth);
3654     else
3655       scale_and_extend_frame_nonnormative(unscaled, scaled, (int)cm->bit_depth);
3656 #else
3657     if (use_normative_scaler)
3658       scale_and_extend_frame(unscaled, scaled);
3659     else
3660       scale_and_extend_frame_nonnormative(unscaled, scaled);
3661 #endif  // CONFIG_VP9_HIGHBITDEPTH
3662     return scaled;
3663   } else {
3664     return unscaled;
3665   }
3666 }
3667 
3668 static void set_arf_sign_bias(VP9_COMP *cpi) {
3669   VP9_COMMON *const cm = &cpi->common;
3670   int arf_sign_bias;
3671 
3672   if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
3673     const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
3674     arf_sign_bias = cpi->rc.source_alt_ref_active &&
3675                     (!cpi->refresh_alt_ref_frame ||
3676                      (gf_group->rf_level[gf_group->index] == GF_ARF_LOW));
3677   } else {
3678     arf_sign_bias =
3679       (cpi->rc.source_alt_ref_active && !cpi->refresh_alt_ref_frame);
3680   }
3681   cm->ref_frame_sign_bias[ALTREF_FRAME] = arf_sign_bias;
3682 }
3683 
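// Builds the interpolation filter search mask: a filter is added to the mask
// when the LAST reference never selected it and GOLDEN/ALTREF selected it for
// less than 1/50th of their choices (or not at all).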
3684 static int setup_interp_filter_search_mask(VP9_COMP *cpi) {
3685   INTERP_FILTER ifilter;
3686   int ref_total[MAX_REF_FRAMES] = {0};
3687   MV_REFERENCE_FRAME ref;
3688   int mask = 0;
3689   if (cpi->common.last_frame_type == KEY_FRAME ||
3690       cpi->refresh_alt_ref_frame)
3691     return mask;
3692   for (ref = LAST_FRAME; ref <= ALTREF_FRAME; ++ref)
3693     for (ifilter = EIGHTTAP; ifilter <= EIGHTTAP_SHARP; ++ifilter)
3694       ref_total[ref] += cpi->interp_filter_selected[ref][ifilter];
3695 
3696   for (ifilter = EIGHTTAP; ifilter <= EIGHTTAP_SHARP; ++ifilter) {
3697     if ((ref_total[LAST_FRAME] &&
3698         cpi->interp_filter_selected[LAST_FRAME][ifilter] == 0) &&
3699         (ref_total[GOLDEN_FRAME] == 0 ||
3700          cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 50
3701            < ref_total[GOLDEN_FRAME]) &&
3702         (ref_total[ALTREF_FRAME] == 0 ||
3703          cpi->interp_filter_selected[ALTREF_FRAME][ifilter] * 50
3704            < ref_total[ALTREF_FRAME]))
3705       mask |= 1 << ifilter;
3706   }
3707   return mask;
3708 }
3709 
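// Top-level single frame encode: applies external overrides, handles key
// frame and SVC state, runs the chosen encode loop, applies the loop filter,
// packs the bitstream, updates the reference buffers, and adapts the entropy
// contexts and rate control state for the next frame.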
3710 static void encode_frame_to_data_rate(VP9_COMP *cpi,
3711                                       size_t *size,
3712                                       uint8_t *dest,
3713                                       unsigned int *frame_flags) {
3714   VP9_COMMON *const cm = &cpi->common;
3715   const VP9EncoderConfig *const oxcf = &cpi->oxcf;
3716   struct segmentation *const seg = &cm->seg;
3717   TX_SIZE t;
3718 
3719   set_ext_overrides(cpi);
3720   vpx_clear_system_state();
3721 
3722   // Set the arf sign bias for this frame.
3723   set_arf_sign_bias(cpi);
3724 
3725   // Set default state for segment based loop filter update flags.
3726   cm->lf.mode_ref_delta_update = 0;
3727 
3728   if (cpi->oxcf.pass == 2 &&
3729       cpi->sf.adaptive_interp_filter_search)
3730     cpi->sf.interp_filter_search_mask =
3731         setup_interp_filter_search_mask(cpi);
3732 
3733   // Set various flags etc to special state if it is a key frame.
3734   if (frame_is_intra_only(cm)) {
3735     // Reset the loop filter deltas and segmentation map.
3736     vp9_reset_segment_features(&cm->seg);
3737 
3738     // If segmentation is enabled force a map update for key frames.
3739     if (seg->enabled) {
3740       seg->update_map = 1;
3741       seg->update_data = 1;
3742     }
3743 
3744     // The alternate reference frame cannot be active for a key frame.
3745     cpi->rc.source_alt_ref_active = 0;
3746 
3747     cm->error_resilient_mode = oxcf->error_resilient_mode;
3748     cm->frame_parallel_decoding_mode = oxcf->frame_parallel_decoding_mode;
3749 
3750     // By default, encoder assumes decoder can use prev_mi.
3751     if (cm->error_resilient_mode) {
3752       cm->frame_parallel_decoding_mode = 1;
3753       cm->reset_frame_context = 0;
3754       cm->refresh_frame_context = 0;
3755     } else if (cm->intra_only) {
3756       // Only reset the current context.
3757       cm->reset_frame_context = 2;
3758     }
3759   }
3760   if (is_two_pass_svc(cpi) && cm->error_resilient_mode == 0) {
3761     // Use context 0 for intra only empty frame, but the last frame context
3762     // for other empty frames.
3763     if (cpi->svc.encode_empty_frame_state == ENCODING) {
3764       if (cpi->svc.encode_intra_empty_frame != 0)
3765         cm->frame_context_idx = 0;
3766       else
3767         cm->frame_context_idx = FRAME_CONTEXTS - 1;
3768     } else {
3769       cm->frame_context_idx =
3770           cpi->svc.spatial_layer_id * cpi->svc.number_temporal_layers +
3771           cpi->svc.temporal_layer_id;
3772     }
3773 
3774     cm->frame_parallel_decoding_mode = oxcf->frame_parallel_decoding_mode;
3775 
3776     // The probs will be updated based on the frame type of its previous
3777     // frame if frame_parallel_decoding_mode is 0. The type may vary for
3778     // the frame after a key frame in base layer since we may drop enhancement
3779     // layers. So set frame_parallel_decoding_mode to 1 in this case.
3780     if (cm->frame_parallel_decoding_mode == 0) {
3781       if (cpi->svc.number_temporal_layers == 1) {
3782         if (cpi->svc.spatial_layer_id == 0 &&
3783             cpi->svc.layer_context[0].last_frame_type == KEY_FRAME)
3784           cm->frame_parallel_decoding_mode = 1;
3785       } else if (cpi->svc.spatial_layer_id == 0) {
3786         // Find the 2nd frame in temporal base layer and 1st frame in temporal
3787         // enhancement layers from the key frame.
3788         int i;
3789         for (i = 0; i < cpi->svc.number_temporal_layers; ++i) {
3790           if (cpi->svc.layer_context[0].frames_from_key_frame == 1 << i) {
3791             cm->frame_parallel_decoding_mode = 1;
3792             break;
3793           }
3794         }
3795       }
3796     }
3797   }
3798 
3799   // For 1 pass CBR, check if we are dropping this frame.
3800   // Never drop on key frame.
3801   if (oxcf->pass == 0 &&
3802       oxcf->rc_mode == VPX_CBR &&
3803       cm->frame_type != KEY_FRAME) {
3804     if (vp9_rc_drop_frame(cpi)) {
3805       vp9_rc_postencode_update_drop_frame(cpi);
3806       ++cm->current_video_frame;
3807       cpi->ext_refresh_frame_flags_pending = 0;
3808       return;
3809     }
3810   }
3811 
3812   vpx_clear_system_state();
3813 
3814 #if CONFIG_INTERNAL_STATS
3815   memset(cpi->mode_chosen_counts, 0,
3816          MAX_MODES * sizeof(*cpi->mode_chosen_counts));
3817 #endif
3818 
3819   if (cpi->sf.recode_loop == DISALLOW_RECODE) {
3820     encode_without_recode_loop(cpi, size, dest);
3821   } else {
3822     encode_with_recode_loop(cpi, size, dest);
3823   }
3824 
3825 #if CONFIG_VP9_TEMPORAL_DENOISING
3826 #ifdef OUTPUT_YUV_DENOISED
3827   if (oxcf->noise_sensitivity > 0) {
3828     vp9_write_yuv_frame_420(&cpi->denoiser.running_avg_y[INTRA_FRAME],
3829                             yuv_denoised_file);
3830   }
3831 #endif
3832 #endif
3833 #ifdef OUTPUT_YUV_SKINMAP
3834   if (cpi->common.current_video_frame > 1) {
3835     vp9_compute_skin_map(cpi, yuv_skinmap_file);
3836   }
3837 #endif
3838 
3839   // Special case code to reduce pulsing when key frames are forced at a
3840   // fixed interval. Note the reconstruction error if it is the frame before
3841   // the forced key frame.
3842   if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
3843 #if CONFIG_VP9_HIGHBITDEPTH
3844     if (cm->use_highbitdepth) {
3845       cpi->ambient_err = vp9_highbd_get_y_sse(cpi->Source,
3846                                               get_frame_new_buffer(cm));
3847     } else {
3848       cpi->ambient_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3849     }
3850 #else
3851     cpi->ambient_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3852 #endif  // CONFIG_VP9_HIGHBITDEPTH
3853   }
3854 
3855   // If the encoder forced a KEY_FRAME decision
3856   if (cm->frame_type == KEY_FRAME)
3857     cpi->refresh_last_frame = 1;
3858 
3859   cm->frame_to_show = get_frame_new_buffer(cm);
3860   cm->frame_to_show->color_space = cm->color_space;
3861   cm->frame_to_show->color_range = cm->color_range;
3862   cm->frame_to_show->render_width  = cm->render_width;
3863   cm->frame_to_show->render_height = cm->render_height;
3864 
3865   // Pick the loop filter level for the frame.
3866   loopfilter_frame(cpi, cm);
3867 
3868   // build the bitstream
3869   vp9_pack_bitstream(cpi, dest, size);
3870 
3871   if (cm->seg.update_map)
3872     update_reference_segmentation_map(cpi);
3873 
3874   if (frame_is_intra_only(cm) == 0) {
3875     release_scaled_references(cpi);
3876   }
3877   vp9_update_reference_frames(cpi);
3878 
3879   for (t = TX_4X4; t <= TX_32X32; t++)
3880     full_to_model_counts(cpi->td.counts->coef[t],
3881                          cpi->td.rd_counts.coef_counts[t]);
3882 
3883   if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode)
3884     vp9_adapt_coef_probs(cm);
3885 
3886   if (!frame_is_intra_only(cm)) {
3887     if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode) {
3888       vp9_adapt_mode_probs(cm);
3889       vp9_adapt_mv_probs(cm, cm->allow_high_precision_mv);
3890     }
3891   }
3892 
3893   cpi->ext_refresh_frame_flags_pending = 0;
3894 
3895   if (cpi->refresh_golden_frame == 1)
3896     cpi->frame_flags |= FRAMEFLAGS_GOLDEN;
3897   else
3898     cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
3899 
3900   if (cpi->refresh_alt_ref_frame == 1)
3901     cpi->frame_flags |= FRAMEFLAGS_ALTREF;
3902   else
3903     cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
3904 
3905   cpi->ref_frame_flags = get_ref_frame_flags(cpi);
3906 
3907   cm->last_frame_type = cm->frame_type;
3908 
3909   if (!(is_two_pass_svc(cpi) && cpi->svc.encode_empty_frame_state == ENCODING))
3910     vp9_rc_postencode_update(cpi, *size);
3911 
3912 #if 0
3913   output_frame_level_debug_stats(cpi);
3914 #endif
3915 
3916   if (cm->frame_type == KEY_FRAME) {
3917     // Tell the caller that the frame was coded as a key frame
3918     *frame_flags = cpi->frame_flags | FRAMEFLAGS_KEY;
3919   } else {
3920     *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
3921   }
3922 
3923   // Clear the one shot update flags for segmentation map and mode/ref loop
3924   // filter deltas.
3925   cm->seg.update_map = 0;
3926   cm->seg.update_data = 0;
3927   cm->lf.mode_ref_delta_update = 0;
3928 
3929   // keep track of the last coded dimensions
3930   cm->last_width = cm->width;
3931   cm->last_height = cm->height;
3932 
3933   // reset to normal state now that we are done.
3934   if (!cm->show_existing_frame)
3935     cm->last_show_frame = cm->show_frame;
3936 
3937   if (cm->show_frame) {
3938     vp9_swap_mi_and_prev_mi(cm);
3939     // Don't increment frame counters if this was an altref buffer
3940     // update, not a real frame.
3941     ++cm->current_video_frame;
3942     if (cpi->use_svc)
3943       vp9_inc_frame_in_layer(cpi);
3944   }
3945   cm->prev_frame = cm->cur_frame;
3946 
3947   if (cpi->use_svc)
3948     cpi->svc.layer_context[cpi->svc.spatial_layer_id *
3949                            cpi->svc.number_temporal_layers +
3950                            cpi->svc.temporal_layer_id].last_frame_type =
3951                                cm->frame_type;
3952 }
3953 
3954 static void SvcEncode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
3955                       unsigned int *frame_flags) {
3956   vp9_rc_get_svc_params(cpi);
3957   encode_frame_to_data_rate(cpi, size, dest, frame_flags);
3958 }
3959 
3960 static void Pass0Encode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
3961                         unsigned int *frame_flags) {
3962   if (cpi->oxcf.rc_mode == VPX_CBR) {
3963     vp9_rc_get_one_pass_cbr_params(cpi);
3964   } else {
3965     vp9_rc_get_one_pass_vbr_params(cpi);
3966   }
3967   encode_frame_to_data_rate(cpi, size, dest, frame_flags);
3968 }
3969 
3970 static void Pass2Encode(VP9_COMP *cpi, size_t *size,
3971                         uint8_t *dest, unsigned int *frame_flags) {
3972   cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
3973   encode_frame_to_data_rate(cpi, size, dest, frame_flags);
3974 
3975   if (!(is_two_pass_svc(cpi) && cpi->svc.encode_empty_frame_state == ENCODING))
3976     vp9_twopass_postencode_update(cpi);
3977 }
3978 
3979 static void init_ref_frame_bufs(VP9_COMMON *cm) {
3980   int i;
3981   BufferPool *const pool = cm->buffer_pool;
3982   cm->new_fb_idx = INVALID_IDX;
3983   for (i = 0; i < REF_FRAMES; ++i) {
3984     cm->ref_frame_map[i] = INVALID_IDX;
3985     pool->frame_bufs[i].ref_count = 0;
3986   }
3987 }
3988 
3989 static void check_initial_width(VP9_COMP *cpi,
3990 #if CONFIG_VP9_HIGHBITDEPTH
3991                                 int use_highbitdepth,
3992 #endif
3993                                 int subsampling_x, int subsampling_y) {
3994   VP9_COMMON *const cm = &cpi->common;
3995 
3996   if (!cpi->initial_width ||
3997 #if CONFIG_VP9_HIGHBITDEPTH
3998       cm->use_highbitdepth != use_highbitdepth ||
3999 #endif
4000       cm->subsampling_x != subsampling_x ||
4001       cm->subsampling_y != subsampling_y) {
4002     cm->subsampling_x = subsampling_x;
4003     cm->subsampling_y = subsampling_y;
4004 #if CONFIG_VP9_HIGHBITDEPTH
4005     cm->use_highbitdepth = use_highbitdepth;
4006 #endif
4007 
4008     alloc_raw_frame_buffers(cpi);
4009     init_ref_frame_bufs(cm);
4010     alloc_util_frame_buffers(cpi);
4011 
4012     init_motion_estimation(cpi);  // TODO(agrange) This can be removed.
4013 
4014     cpi->initial_width = cm->width;
4015     cpi->initial_height = cm->height;
4016     cpi->initial_mbs = cm->MBs;
4017   }
4018 }
4019 
4020 int vp9_receive_raw_frame(VP9_COMP *cpi, unsigned int frame_flags,
4021                           YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
4022                           int64_t end_time) {
4023   VP9_COMMON *cm = &cpi->common;
4024   struct vpx_usec_timer timer;
4025   int res = 0;
4026   const int subsampling_x = sd->subsampling_x;
4027   const int subsampling_y = sd->subsampling_y;
4028 #if CONFIG_VP9_HIGHBITDEPTH
4029   const int use_highbitdepth = sd->flags & YV12_FLAG_HIGHBITDEPTH;
4030   check_initial_width(cpi, use_highbitdepth, subsampling_x, subsampling_y);
4031 #else
4032   check_initial_width(cpi, subsampling_x, subsampling_y);
4033 #endif  // CONFIG_VP9_HIGHBITDEPTH
4034 
4035 #if CONFIG_VP9_TEMPORAL_DENOISING
4036   setup_denoiser_buffer(cpi);
4037 #endif
4038   vpx_usec_timer_start(&timer);
4039 
4040   if (vp9_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
4041 #if CONFIG_VP9_HIGHBITDEPTH
4042                          use_highbitdepth,
4043 #endif  // CONFIG_VP9_HIGHBITDEPTH
4044                          frame_flags))
4045     res = -1;
4046   vpx_usec_timer_mark(&timer);
4047   cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
4048 
4049   if ((cm->profile == PROFILE_0 || cm->profile == PROFILE_2) &&
4050       (subsampling_x != 1 || subsampling_y != 1)) {
4051     vpx_internal_error(&cm->error, VPX_CODEC_INVALID_PARAM,
4052                        "Non-4:2:0 color format requires profile 1 or 3");
4053     res = -1;
4054   }
4055   if ((cm->profile == PROFILE_1 || cm->profile == PROFILE_3) &&
4056       (subsampling_x == 1 && subsampling_y == 1)) {
4057     vpx_internal_error(&cm->error, VPX_CODEC_INVALID_PARAM,
4058                        "4:2:0 color format requires profile 0 or 2");
4059     res = -1;
4060   }
4061 
4062   return res;
4063 }
4064 
4066 static int frame_is_reference(const VP9_COMP *cpi) {
4067   const VP9_COMMON *cm = &cpi->common;
4068 
4069   return cm->frame_type == KEY_FRAME ||
4070          cpi->refresh_last_frame ||
4071          cpi->refresh_golden_frame ||
4072          cpi->refresh_alt_ref_frame ||
4073          cm->refresh_frame_context ||
4074          cm->lf.mode_ref_delta_update ||
4075          cm->seg.update_map ||
4076          cm->seg.update_data;
4077 }
4078 
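// Updates the encoder frame rate estimate from the source timestamps: a step
// change of roughly 10% or more in frame duration resets the estimate,
// otherwise the new duration is averaged into about the last second of input.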
4079 static void adjust_frame_rate(VP9_COMP *cpi,
4080                               const struct lookahead_entry *source) {
4081   int64_t this_duration;
4082   int step = 0;
4083 
4084   if (source->ts_start == cpi->first_time_stamp_ever) {
4085     this_duration = source->ts_end - source->ts_start;
4086     step = 1;
4087   } else {
4088     int64_t last_duration = cpi->last_end_time_stamp_seen
4089         - cpi->last_time_stamp_seen;
4090 
4091     this_duration = source->ts_end - cpi->last_end_time_stamp_seen;
4092 
4093     // do a step update if the duration changes by 10%
4094     if (last_duration)
4095       step = (int)((this_duration - last_duration) * 10 / last_duration);
4096   }
4097 
4098   if (this_duration) {
4099     if (step) {
4100       vp9_new_framerate(cpi, 10000000.0 / this_duration);
4101     } else {
4102       // Average this frame's rate into the last second's average
4103       // frame rate. If we haven't seen 1 second yet, then average
4104       // over the whole interval seen.
4105       const double interval = VPXMIN(
4106           (double)(source->ts_end - cpi->first_time_stamp_ever), 10000000.0);
4107       double avg_duration = 10000000.0 / cpi->framerate;
4108       avg_duration *= (interval - avg_duration + this_duration);
4109       avg_duration /= interval;
4110 
4111       vp9_new_framerate(cpi, 10000000.0 / avg_duration);
4112     }
4113   }
4114   cpi->last_time_stamp_seen = source->ts_start;
4115   cpi->last_end_time_stamp_seen = source->ts_end;
4116 }
4117 
4118 // Returns 0 if this is not an alt ref; otherwise returns the offset of the
4119 // source frame used as the arf midpoint.
4120 static int get_arf_src_index(VP9_COMP *cpi) {
4121   RATE_CONTROL *const rc = &cpi->rc;
4122   int arf_src_index = 0;
4123   if (is_altref_enabled(cpi)) {
4124     if (cpi->oxcf.pass == 2) {
4125       const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4126       if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
4127         arf_src_index = gf_group->arf_src_offset[gf_group->index];
4128       }
4129     } else if (rc->source_alt_ref_pending) {
4130       arf_src_index = rc->frames_till_gf_update_due;
4131     }
4132   }
4133   return arf_src_index;
4134 }
4135 
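// Determines whether the current source frame is the overlay for an existing
// ARF; if so, the last frame buffer is not refreshed so it remains available
// as an alternative prediction reference.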
4136 static void check_src_altref(VP9_COMP *cpi,
4137                              const struct lookahead_entry *source) {
4138   RATE_CONTROL *const rc = &cpi->rc;
4139 
4140   if (cpi->oxcf.pass == 2) {
4141     const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4142     rc->is_src_frame_alt_ref =
4143       (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE);
4144   } else {
4145     rc->is_src_frame_alt_ref = cpi->alt_ref_source &&
4146                                (source == cpi->alt_ref_source);
4147   }
4148 
4149   if (rc->is_src_frame_alt_ref) {
4150     // Current frame is an ARF overlay frame.
4151     cpi->alt_ref_source = NULL;
4152 
4153     // Don't refresh the last buffer for an ARF overlay frame. It will
4154     // become the GF so preserve last as an alternative prediction option.
4155     cpi->refresh_last_frame = 0;
4156   }
4157 }
4158 
4159 #if CONFIG_INTERNAL_STATS
4160 extern double vp9_get_blockiness(const uint8_t *img1, int img1_pitch,
4161                                  const uint8_t *img2, int img2_pitch,
4162                                  int width, int height);
4163 
4164 static void adjust_image_stat(double y, double u, double v, double all,
4165                               ImageStat *s) {
4166   s->stat[Y] += y;
4167   s->stat[U] += u;
4168   s->stat[V] += v;
4169   s->stat[ALL] += all;
4170   s->worst = VPXMIN(s->worst, all);
4171 }
4172 #endif  // CONFIG_INTERNAL_STATS
4173 
4174 int vp9_get_compressed_data(VP9_COMP *cpi, unsigned int *frame_flags,
4175                             size_t *size, uint8_t *dest,
4176                             int64_t *time_stamp, int64_t *time_end, int flush) {
4177   const VP9EncoderConfig *const oxcf = &cpi->oxcf;
4178   VP9_COMMON *const cm = &cpi->common;
4179   BufferPool *const pool = cm->buffer_pool;
4180   RATE_CONTROL *const rc = &cpi->rc;
4181   struct vpx_usec_timer  cmptimer;
4182   YV12_BUFFER_CONFIG *force_src_buffer = NULL;
4183   struct lookahead_entry *last_source = NULL;
4184   struct lookahead_entry *source = NULL;
4185   int arf_src_index;
4186   int i;
4187 
4188   if (is_two_pass_svc(cpi)) {
4189 #if CONFIG_SPATIAL_SVC
4190     vp9_svc_start_frame(cpi);
4191     // Use a small empty frame instead of a real frame
4192     if (cpi->svc.encode_empty_frame_state == ENCODING)
4193       source = &cpi->svc.empty_frame;
4194 #endif
4195     if (oxcf->pass == 2)
4196       vp9_restore_layer_context(cpi);
4197   } else if (is_one_pass_cbr_svc(cpi)) {
4198     vp9_one_pass_cbr_svc_start_layer(cpi);
4199   }
4200 
4201   vpx_usec_timer_start(&cmptimer);
4202 
4203   vp9_set_high_precision_mv(cpi, ALTREF_HIGH_PRECISION_MV);
4204 
4205   // Is multi-arf enabled?
4206   // Note that at the moment multi_arf is only configured for 2 pass VBR and
4207   // will not work properly with svc.
4208   if ((oxcf->pass == 2) && !cpi->use_svc &&
4209       (cpi->oxcf.enable_auto_arf > 1))
4210     cpi->multi_arf_allowed = 1;
4211   else
4212     cpi->multi_arf_allowed = 0;
4213 
4214   // Normal defaults
4215   cm->reset_frame_context = 0;
4216   cm->refresh_frame_context = 1;
4217   if (!is_one_pass_cbr_svc(cpi)) {
4218     cpi->refresh_last_frame = 1;
4219     cpi->refresh_golden_frame = 0;
4220     cpi->refresh_alt_ref_frame = 0;
4221   }
4222 
4223   // Should we encode an arf frame.
4224   arf_src_index = get_arf_src_index(cpi);
4225 
4226   // Skip alt frame if we encode the empty frame
4227   if (is_two_pass_svc(cpi) && source != NULL)
4228     arf_src_index = 0;
4229 
4230   if (arf_src_index) {
4231     assert(arf_src_index <= rc->frames_to_key);
4232 
4233     if ((source = vp9_lookahead_peek(cpi->lookahead, arf_src_index)) != NULL) {
4234       cpi->alt_ref_source = source;
4235 
4236 #if CONFIG_SPATIAL_SVC
4237       if (is_two_pass_svc(cpi) && cpi->svc.spatial_layer_id > 0) {
4238         int i;
4239         // Reference a hidden frame from a lower layer
4240         for (i = cpi->svc.spatial_layer_id - 1; i >= 0; --i) {
4241           if (oxcf->ss_enable_auto_arf[i]) {
4242             cpi->gld_fb_idx = cpi->svc.layer_context[i].alt_ref_idx;
4243             break;
4244           }
4245         }
4246       }
4247       cpi->svc.layer_context[cpi->svc.spatial_layer_id].has_alt_frame = 1;
4248 #endif
4249 
4250       if (oxcf->arnr_max_frames > 0) {
4251         // Produce the filtered ARF frame.
4252         vp9_temporal_filter(cpi, arf_src_index);
4253         vpx_extend_frame_borders(&cpi->alt_ref_buffer);
4254         force_src_buffer = &cpi->alt_ref_buffer;
4255       }
4256 
4257       cm->show_frame = 0;
4258       cm->intra_only = 0;
4259       cpi->refresh_alt_ref_frame = 1;
4260       cpi->refresh_golden_frame = 0;
4261       cpi->refresh_last_frame = 0;
4262       rc->is_src_frame_alt_ref = 0;
4263       rc->source_alt_ref_pending = 0;
4264     } else {
4265       rc->source_alt_ref_pending = 0;
4266     }
4267   }
4268 
4269   if (!source) {
4270     // Get last frame source.
4271     if (cm->current_video_frame > 0) {
4272       if ((last_source = vp9_lookahead_peek(cpi->lookahead, -1)) == NULL)
4273         return -1;
4274     }
4275 
4276     // Read in the source frame.
4277     if (cpi->use_svc)
4278       source = vp9_svc_lookahead_pop(cpi, cpi->lookahead, flush);
4279     else
4280       source = vp9_lookahead_pop(cpi->lookahead, flush);
4281 
4282     if (source != NULL) {
4283       cm->show_frame = 1;
4284       cm->intra_only = 0;
4285       // If the flags request an intra (key) frame but the current picture is
4286       // for a non-zero spatial layer, it should not be an intra picture.
4287       // TODO(Won Kap): this needs to change if per-layer intra frames are
4288       // allowed.
4289       if ((source->flags & VPX_EFLAG_FORCE_KF) &&
4290           cpi->svc.spatial_layer_id > cpi->svc.first_spatial_layer_to_encode) {
4291         source->flags &= ~(unsigned int)(VPX_EFLAG_FORCE_KF);
4292       }
4293 
4294       // Check to see if the frame should be encoded as an arf overlay.
4295       check_src_altref(cpi, source);
4296     }
4297   }
4298 
4299   if (source) {
4300     cpi->un_scaled_source = cpi->Source = force_src_buffer ? force_src_buffer
4301                                                            : &source->img;
4302 
4303     cpi->unscaled_last_source = last_source != NULL ? &last_source->img : NULL;
4304 
4305     *time_stamp = source->ts_start;
4306     *time_end = source->ts_end;
4307     *frame_flags = (source->flags & VPX_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
4308 
4309   } else {
4310     *size = 0;
4311     if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
4312       vp9_end_first_pass(cpi);    /* get last stats packet */
4313       cpi->twopass.first_pass_done = 1;
4314     }
4315     return -1;
4316   }
4317 
4318   if (source->ts_start < cpi->first_time_stamp_ever) {
4319     cpi->first_time_stamp_ever = source->ts_start;
4320     cpi->last_end_time_stamp_seen = source->ts_start;
4321   }
4322 
4323   // Clear down mmx registers
4324   vpx_clear_system_state();
4325 
4326   // Adjust the frame rate based on the timestamps given.
4327   if (cm->show_frame) {
4328     adjust_frame_rate(cpi, source);
4329   }
4330 
4331   if (is_one_pass_cbr_svc(cpi)) {
4332     vp9_update_temporal_layer_framerate(cpi);
4333     vp9_restore_layer_context(cpi);
4334   }
4335 
4336   // Find a free buffer for the new frame, releasing the reference previously
4337   // held.
4338   if (cm->new_fb_idx != INVALID_IDX) {
4339     --pool->frame_bufs[cm->new_fb_idx].ref_count;
4340   }
4341   cm->new_fb_idx = get_free_fb(cm);
4342 
4343   if (cm->new_fb_idx == INVALID_IDX)
4344     return -1;
4345 
4346   cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];
4347 
4348   if (!cpi->use_svc && cpi->multi_arf_allowed) {
4349     if (cm->frame_type == KEY_FRAME) {
4350       init_buffer_indices(cpi);
4351     } else if (oxcf->pass == 2) {
4352       const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4353       cpi->alt_fb_idx = gf_group->arf_ref_idx[gf_group->index];
4354     }
4355   }
4356 
4357   // Start with a 0 size frame.
4358   *size = 0;
4359 
4360   cpi->frame_flags = *frame_flags;
4361 
4362   if ((oxcf->pass == 2) &&
4363       (!cpi->use_svc ||
4364           (is_two_pass_svc(cpi) &&
4365               cpi->svc.encode_empty_frame_state != ENCODING))) {
4366     vp9_rc_get_second_pass_params(cpi);
4367   } else if (oxcf->pass == 1) {
4368     set_frame_size(cpi);
4369   }
4370 
4371   if (cpi->oxcf.pass != 0 ||
4372       cpi->use_svc ||
4373       frame_is_intra_only(cm) == 1) {
4374     for (i = 0; i < MAX_REF_FRAMES; ++i)
4375       cpi->scaled_ref_idx[i] = INVALID_IDX;
4376   }
4377 
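  // Dispatch to the appropriate encode path: first pass (stats gathering),
  // second pass of a two-pass encode, SVC, or plain one-pass encoding.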
4378   if (oxcf->pass == 1 &&
4379       (!cpi->use_svc || is_two_pass_svc(cpi))) {
4380     const int lossless = is_lossless_requested(oxcf);
4381 #if CONFIG_VP9_HIGHBITDEPTH
4382     if (cpi->oxcf.use_highbitdepth)
4383       cpi->td.mb.fwd_txm4x4 = lossless ?
4384           vp9_highbd_fwht4x4 : vpx_highbd_fdct4x4;
4385     else
4386       cpi->td.mb.fwd_txm4x4 = lossless ? vp9_fwht4x4 : vpx_fdct4x4;
4387     cpi->td.mb.highbd_itxm_add = lossless ? vp9_highbd_iwht4x4_add :
4388                                             vp9_highbd_idct4x4_add;
4389 #else
4390     cpi->td.mb.fwd_txm4x4 = lossless ? vp9_fwht4x4 : vpx_fdct4x4;
4391 #endif  // CONFIG_VP9_HIGHBITDEPTH
4392     cpi->td.mb.itxm_add = lossless ? vp9_iwht4x4_add : vp9_idct4x4_add;
4393     vp9_first_pass(cpi, source);
4394   } else if (oxcf->pass == 2 &&
4395       (!cpi->use_svc || is_two_pass_svc(cpi))) {
4396     Pass2Encode(cpi, size, dest, frame_flags);
4397   } else if (cpi->use_svc) {
4398     SvcEncode(cpi, size, dest, frame_flags);
4399   } else {
4400     // One pass encode
4401     Pass0Encode(cpi, size, dest, frame_flags);
4402   }
4403 
4404   if (cm->refresh_frame_context)
4405     cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
4406 
4407   // If no frame was encoded, or the frame was dropped, release scaled references.
4408   if ((*size == 0) && (frame_is_intra_only(cm) == 0)) {
4409     release_scaled_references(cpi);
4410   }
4411 
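  // A frame that does not update any reference buffer can be dropped by the
  // decoder without affecting later frames.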
4412   if (*size > 0) {
4413     cpi->droppable = !frame_is_reference(cpi);
4414   }
4415 
4416   // Save layer-specific state.
4417   if (is_one_pass_cbr_svc(cpi) ||
4418         ((cpi->svc.number_temporal_layers > 1 ||
4419           cpi->svc.number_spatial_layers > 1) &&
4420          oxcf->pass == 2)) {
4421     vp9_save_layer_context(cpi);
4422   }
4423 
4424   vpx_usec_timer_mark(&cmptimer);
4425   cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
4426 
4427   if (cpi->b_calculate_psnr && oxcf->pass != 1 && cm->show_frame)
4428     generate_psnr_packet(cpi);
4429 
4430 #if CONFIG_INTERNAL_STATS
4431 
4432   if (oxcf->pass != 1) {
4433     double samples = 0.0;
4434     cpi->bytes += (int)(*size);
4435 
4436     if (cm->show_frame) {
4437       cpi->count++;
4438 
4439       if (cpi->b_calculate_psnr) {
4440         YV12_BUFFER_CONFIG *orig = cpi->Source;
4441         YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
4442         YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
4443         PSNR_STATS psnr;
4444 #if CONFIG_VP9_HIGHBITDEPTH
4445         calc_highbd_psnr(orig, recon, &psnr, cpi->td.mb.e_mbd.bd,
4446                          cpi->oxcf.input_bit_depth);
4447 #else
4448         calc_psnr(orig, recon, &psnr);
4449 #endif  // CONFIG_VP9_HIGHBITDEPTH
4450 
4451         adjust_image_stat(psnr.psnr[1], psnr.psnr[2], psnr.psnr[3],
4452                           psnr.psnr[0], &cpi->psnr);
4453         cpi->total_sq_error += psnr.sse[0];
4454         cpi->total_samples += psnr.samples[0];
4455         samples = psnr.samples[0];
4456 
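        // The block below repeats the PSNR/SSIM measurement against the
        // deblocked post-processing buffer (the *p statistics), alongside the
        // metrics computed on the raw reconstruction above.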
4457         {
4458           PSNR_STATS psnr2;
4459           double frame_ssim2 = 0, weight = 0;
4460 #if CONFIG_VP9_POSTPROC
4461           if (vpx_alloc_frame_buffer(&cm->post_proc_buffer,
4462                                      recon->y_crop_width, recon->y_crop_height,
4463                                      cm->subsampling_x, cm->subsampling_y,
4464 #if CONFIG_VP9_HIGHBITDEPTH
4465                                      cm->use_highbitdepth,
4466 #endif
4467                                      VP9_ENC_BORDER_IN_PIXELS,
4468                                      cm->byte_alignment) < 0) {
4469             vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
4470                                "Failed to allocate post processing buffer");
4471           }
4472 
4473           vp9_deblock(cm->frame_to_show, &cm->post_proc_buffer,
4474                       cm->lf.filter_level * 10 / 6);
4475 #endif
4476           vpx_clear_system_state();
4477 
4478 #if CONFIG_VP9_HIGHBITDEPTH
4479           calc_highbd_psnr(orig, pp, &psnr2, cpi->td.mb.e_mbd.bd,
4480                            cpi->oxcf.input_bit_depth);
4481 #else
4482           calc_psnr(orig, pp, &psnr2);
4483 #endif  // CONFIG_VP9_HIGHBITDEPTH
4484 
4485           cpi->totalp_sq_error += psnr2.sse[0];
4486           cpi->totalp_samples += psnr2.samples[0];
4487           adjust_image_stat(psnr2.psnr[1], psnr2.psnr[2], psnr2.psnr[3],
4488                             psnr2.psnr[0], &cpi->psnrp);
4489 
4490 #if CONFIG_VP9_HIGHBITDEPTH
4491           if (cm->use_highbitdepth) {
4492             frame_ssim2 = vpx_highbd_calc_ssim(orig, recon, &weight,
4493                                                (int)cm->bit_depth);
4494           } else {
4495             frame_ssim2 = vpx_calc_ssim(orig, recon, &weight);
4496           }
4497 #else
4498           frame_ssim2 = vpx_calc_ssim(orig, recon, &weight);
4499 #endif  // CONFIG_VP9_HIGHBITDEPTH
4500 
4501           cpi->worst_ssim = VPXMIN(cpi->worst_ssim, frame_ssim2);
4502           cpi->summed_quality += frame_ssim2 * weight;
4503           cpi->summed_weights += weight;
4504 
4505 #if CONFIG_VP9_HIGHBITDEPTH
4506           if (cm->use_highbitdepth) {
4507             frame_ssim2 = vpx_highbd_calc_ssim(
4508                 orig, &cm->post_proc_buffer, &weight, (int)cm->bit_depth);
4509           } else {
4510             frame_ssim2 = vpx_calc_ssim(orig, &cm->post_proc_buffer, &weight);
4511           }
4512 #else
4513           frame_ssim2 = vpx_calc_ssim(orig, &cm->post_proc_buffer, &weight);
4514 #endif  // CONFIG_VP9_HIGHBITDEPTH
4515 
4516           cpi->summedp_quality += frame_ssim2 * weight;
4517           cpi->summedp_weights += weight;
4518 #if 0
4519           {
4520             FILE *f = fopen("q_used.stt", "a");
4521             fprintf(f, "%5d : Y%f7.3:U%f7.3:V%f7.3:F%f7.3:S%7.3f\n",
4522                     cpi->common.current_video_frame, y2, u2, v2,
4523                     frame_psnr2, frame_ssim2);
4524             fclose(f);
4525           }
4526 #endif
4527         }
4528       }
4529       if (cpi->b_calculate_blockiness) {
4530 #if CONFIG_VP9_HIGHBITDEPTH
4531         if (!cm->use_highbitdepth)
4532 #endif
4533         {
4534           double frame_blockiness = vp9_get_blockiness(
4535               cpi->Source->y_buffer, cpi->Source->y_stride,
4536               cm->frame_to_show->y_buffer, cm->frame_to_show->y_stride,
4537               cpi->Source->y_width, cpi->Source->y_height);
4538           cpi->worst_blockiness =
4539               VPXMAX(cpi->worst_blockiness, frame_blockiness);
4540           cpi->total_blockiness += frame_blockiness;
4541         }
4542       }
4543 
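      // The consistency metric converts the accumulated inter-frame SSIM
      // inconsistency into a PSNR-style number via vpx_sse_to_psnr().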
4544       if (cpi->b_calculate_consistency) {
4545 #if CONFIG_VP9_HIGHBITDEPTH
4546         if (!cm->use_highbitdepth)
4547 #endif
4548         {
4549           double this_inconsistency = vpx_get_ssim_metrics(
4550               cpi->Source->y_buffer, cpi->Source->y_stride,
4551               cm->frame_to_show->y_buffer, cm->frame_to_show->y_stride,
4552               cpi->Source->y_width, cpi->Source->y_height, cpi->ssim_vars,
4553               &cpi->metrics, 1);
4554 
4555           const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
4556           double consistency = vpx_sse_to_psnr(samples, peak,
4557                                              (double)cpi->total_inconsistency);
4558           if (consistency > 0.0)
4559             cpi->worst_consistency =
4560                 VPXMIN(cpi->worst_consistency, consistency);
4561           cpi->total_inconsistency += this_inconsistency;
4562         }
4563       }
4564 
4565       if (cpi->b_calculate_ssimg) {
4566         double y, u, v, frame_all;
4567 #if CONFIG_VP9_HIGHBITDEPTH
4568         if (cm->use_highbitdepth) {
4569           frame_all = vpx_highbd_calc_ssimg(cpi->Source, cm->frame_to_show, &y,
4570                                             &u, &v, (int)cm->bit_depth);
4571         } else {
4572           frame_all = vpx_calc_ssimg(cpi->Source, cm->frame_to_show, &y, &u,
4573                                      &v);
4574         }
4575 #else
4576         frame_all = vpx_calc_ssimg(cpi->Source, cm->frame_to_show, &y, &u, &v);
4577 #endif  // CONFIG_VP9_HIGHBITDEPTH
4578         adjust_image_stat(y, u, v, frame_all, &cpi->ssimg);
4579       }
4580 #if CONFIG_VP9_HIGHBITDEPTH
4581       if (!cm->use_highbitdepth)
4582 #endif
4583       {
4584         double y, u, v, frame_all;
4585         frame_all = vpx_calc_fastssim(cpi->Source, cm->frame_to_show, &y, &u,
4586                                       &v);
4587         adjust_image_stat(y, u, v, frame_all, &cpi->fastssim);
4588         /* TODO(JBB): add 10/12 bit support */
4589       }
4590 #if CONFIG_VP9_HIGHBITDEPTH
4591       if (!cm->use_highbitdepth)
4592 #endif
4593       {
4594         double y, u, v, frame_all;
4595         frame_all = vpx_psnrhvs(cpi->Source, cm->frame_to_show, &y, &u, &v);
4596         adjust_image_stat(y, u, v, frame_all, &cpi->psnrhvs);
4597       }
4598     }
4599   }
4600 
4601 #endif
4602 
4603   if (is_two_pass_svc(cpi)) {
4604     if (cpi->svc.encode_empty_frame_state == ENCODING) {
4605       cpi->svc.encode_empty_frame_state = ENCODED;
4606       cpi->svc.encode_intra_empty_frame = 0;
4607     }
4608 
4609     if (cm->show_frame) {
4610       ++cpi->svc.spatial_layer_to_encode;
4611       if (cpi->svc.spatial_layer_to_encode >= cpi->svc.number_spatial_layers)
4612         cpi->svc.spatial_layer_to_encode = 0;
4613 
4614       // May need the empty frame after a visible frame.
4615       cpi->svc.encode_empty_frame_state = NEED_TO_ENCODE;
4616     }
4617   } else if (is_one_pass_cbr_svc(cpi)) {
4618     if (cm->show_frame) {
4619       ++cpi->svc.spatial_layer_to_encode;
4620       if (cpi->svc.spatial_layer_to_encode >= cpi->svc.number_spatial_layers)
4621         cpi->svc.spatial_layer_to_encode = 0;
4622     }
4623   }
4624   vpx_clear_system_state();
4625   return 0;
4626 }
4627 
4628 int vp9_get_preview_raw_frame(VP9_COMP *cpi, YV12_BUFFER_CONFIG *dest,
4629                               vp9_ppflags_t *flags) {
4630   VP9_COMMON *cm = &cpi->common;
4631 #if !CONFIG_VP9_POSTPROC
4632   (void)flags;
4633 #endif
4634 
4635   if (!cm->show_frame) {
4636     return -1;
4637   } else {
4638     int ret;
4639 #if CONFIG_VP9_POSTPROC
4640     ret = vp9_post_proc_frame(cm, dest, flags);
4641 #else
4642     if (cm->frame_to_show) {
4643       *dest = *cm->frame_to_show;
4644       dest->y_width = cm->width;
4645       dest->y_height = cm->height;
4646       dest->uv_width = cm->width >> cm->subsampling_x;
4647       dest->uv_height = cm->height >> cm->subsampling_y;
4648       ret = 0;
4649     } else {
4650       ret = -1;
4651     }
4652 #endif  // !CONFIG_VP9_POSTPROC
4653     vpx_clear_system_state();
4654     return ret;
4655   }
4656 }
4657 
4658 int vp9_set_internal_size(VP9_COMP *cpi,
4659                           VPX_SCALING horiz_mode, VPX_SCALING vert_mode) {
4660   VP9_COMMON *cm = &cpi->common;
4661   int hr = 0, hs = 0, vr = 0, vs = 0;
4662 
4663   if (horiz_mode > ONETWO || vert_mode > ONETWO)
4664     return -1;
4665 
4666   Scale2Ratio(horiz_mode, &hr, &hs);
4667   Scale2Ratio(vert_mode, &vr, &vs);
4668 
4669   // Always round up to the next whole number.
4670   cm->width = (hs - 1 + cpi->oxcf.width * hr) / hs;
4671   cm->height = (vs - 1 + cpi->oxcf.height * vr) / vs;
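  // Example: hr/hs = 3/5 and a 643-pixel source width gives
  // (5 - 1 + 643 * 3) / 5 = 386, whereas plain truncation would give 385.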
4672   if (cm->current_video_frame) {
4673     assert(cm->width <= cpi->initial_width);
4674     assert(cm->height <= cpi->initial_height);
4675   }
4676 
4677   update_frame_size(cpi);
4678 
4679   return 0;
4680 }
4681 
4682 int vp9_set_size_literal(VP9_COMP *cpi, unsigned int width,
4683                          unsigned int height) {
4684   VP9_COMMON *cm = &cpi->common;
4685 #if CONFIG_VP9_HIGHBITDEPTH
4686   check_initial_width(cpi, cm->use_highbitdepth, 1, 1);
4687 #else
4688   check_initial_width(cpi, 1, 1);
4689 #endif  // CONFIG_VP9_HIGHBITDEPTH
4690 
4691 #if CONFIG_VP9_TEMPORAL_DENOISING
4692   setup_denoiser_buffer(cpi);
4693 #endif
4694 
4695   if (width) {
4696     cm->width = width;
4697     if (cm->width > cpi->initial_width) {
4698       cm->width = cpi->initial_width;
4699       printf("Warning: Desired width too large, changed to %d\n", cm->width);
4700     }
4701   }
4702 
4703   if (height) {
4704     cm->height = height;
4705     if (cm->height > cpi->initial_height) {
4706       cm->height = cpi->initial_height;
4707       printf("Warning: Desired height too large, changed to %d\n", cm->height);
4708     }
4709   }
4710   assert(cm->width <= cpi->initial_width);
4711   assert(cm->height <= cpi->initial_height);
4712 
4713   update_frame_size(cpi);
4714 
4715   return 0;
4716 }
4717 
4718 void vp9_set_svc(VP9_COMP *cpi, int use_svc) {
4719   cpi->use_svc = use_svc;
4721 }
4722 
4723 int64_t vp9_get_y_sse(const YV12_BUFFER_CONFIG *a,
4724                       const YV12_BUFFER_CONFIG *b) {
4725   assert(a->y_crop_width == b->y_crop_width);
4726   assert(a->y_crop_height == b->y_crop_height);
4727 
4728   return get_sse(a->y_buffer, a->y_stride, b->y_buffer, b->y_stride,
4729                  a->y_crop_width, a->y_crop_height);
4730 }
4731 
4732 #if CONFIG_VP9_HIGHBITDEPTH
4733 int64_t vp9_highbd_get_y_sse(const YV12_BUFFER_CONFIG *a,
4734                              const YV12_BUFFER_CONFIG *b) {
4735   assert(a->y_crop_width == b->y_crop_width);
4736   assert(a->y_crop_height == b->y_crop_height);
4737   assert((a->flags & YV12_FLAG_HIGHBITDEPTH) != 0);
4738   assert((b->flags & YV12_FLAG_HIGHBITDEPTH) != 0);
4739 
4740   return highbd_get_sse(a->y_buffer, a->y_stride, b->y_buffer, b->y_stride,
4741                         a->y_crop_width, a->y_crop_height);
4742 }
4743 #endif  // CONFIG_VP9_HIGHBITDEPTH
4744 
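// Returns the internal base quantizer index (0..255), not the user-level
// 0..63 quantizer range used in the encoder configuration.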
4745 int vp9_get_quantizer(VP9_COMP *cpi) {
4746   return cpi->common.base_qindex;
4747 }
4748 
4749 void vp9_apply_encoding_flags(VP9_COMP *cpi, vpx_enc_frame_flags_t flags) {
4750   if (flags & (VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF |
4751                VP8_EFLAG_NO_REF_ARF)) {
4752     int ref = 7;
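    // 7 == VP9_LAST_FLAG | VP9_GOLD_FLAG | VP9_ALT_FLAG, i.e. all three
    // references enabled; each NO_REF flag below clears its bit via XOR.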
4753 
4754     if (flags & VP8_EFLAG_NO_REF_LAST)
4755       ref ^= VP9_LAST_FLAG;
4756 
4757     if (flags & VP8_EFLAG_NO_REF_GF)
4758       ref ^= VP9_GOLD_FLAG;
4759 
4760     if (flags & VP8_EFLAG_NO_REF_ARF)
4761       ref ^= VP9_ALT_FLAG;
4762 
4763     vp9_use_as_reference(cpi, ref);
4764   }
4765 
4766   if (flags & (VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
4767                VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_FORCE_GF |
4768                VP8_EFLAG_FORCE_ARF)) {
4769     int upd = 7;
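    // As above: start with all three reference buffers marked for update and
    // clear the ones the NO_UPD flags disable.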
4770 
4771     if (flags & VP8_EFLAG_NO_UPD_LAST)
4772       upd ^= VP9_LAST_FLAG;
4773 
4774     if (flags & VP8_EFLAG_NO_UPD_GF)
4775       upd ^= VP9_GOLD_FLAG;
4776 
4777     if (flags & VP8_EFLAG_NO_UPD_ARF)
4778       upd ^= VP9_ALT_FLAG;
4779 
4780     vp9_update_reference(cpi, upd);
4781   }
4782 
4783   if (flags & VP8_EFLAG_NO_UPD_ENTROPY) {
4784     vp9_update_entropy(cpi, 0);
4785   }
4786 }
4787