/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <stdlib.h>
#include <string.h>

#include "./vpx_config.h"
#include "./vpx_version.h"

#include "vpx/internal/vpx_codec_internal.h"
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"
#include "vpx_dsp/bitreader_buffer.h"
#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_util/vpx_thread.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_frame_buffers.h"

#include "vp9/decoder/vp9_decodeframe.h"

#include "vp9/vp9_dx_iface.h"
#include "vp9/vp9_iface_common.h"

#define VP9_CAP_POSTPROC (CONFIG_VP9_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)

static vpx_codec_err_t decoder_init(vpx_codec_ctx_t *ctx,
                                    vpx_codec_priv_enc_mr_cfg_t *data) {
  // This function only allocates space for the vpx_codec_alg_priv_t
  // structure. More memory may be required at the time the stream
  // information becomes known.
  (void)data;

  if (!ctx->priv) {
    vpx_codec_alg_priv_t *const priv =
        (vpx_codec_alg_priv_t *)vpx_calloc(1, sizeof(*priv));
    if (priv == NULL)
      return VPX_CODEC_MEM_ERROR;

    ctx->priv = (vpx_codec_priv_t *)priv;
    ctx->priv->init_flags = ctx->init_flags;
    priv->si.sz = sizeof(priv->si);
    priv->flushed = 0;
    // Only do frame parallel decode when threads > 1.
    priv->frame_parallel_decode =
        (ctx->config.dec && (ctx->config.dec->threads > 1) &&
         (ctx->init_flags & VPX_CODEC_USE_FRAME_THREADING)) ? 1 : 0;
    if (ctx->config.dec) {
      priv->cfg = *ctx->config.dec;
      ctx->config.dec = &priv->cfg;
    }
  }

  return VPX_CODEC_OK;
}

static vpx_codec_err_t decoder_destroy(vpx_codec_alg_priv_t *ctx) {
  if (ctx->frame_workers != NULL) {
    int i;
    for (i = 0; i < ctx->num_frame_workers; ++i) {
      VPxWorker *const worker = &ctx->frame_workers[i];
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      vpx_get_worker_interface()->end(worker);
      vp9_remove_common(&frame_worker_data->pbi->common);
#if CONFIG_VP9_POSTPROC
      vp9_free_postproc_buffers(&frame_worker_data->pbi->common);
#endif
      vp9_decoder_remove(frame_worker_data->pbi);
      vpx_free(frame_worker_data->scratch_buffer);
#if CONFIG_MULTITHREAD
      pthread_mutex_destroy(&frame_worker_data->stats_mutex);
      pthread_cond_destroy(&frame_worker_data->stats_cond);
#endif
      vpx_free(frame_worker_data);
    }
#if CONFIG_MULTITHREAD
    pthread_mutex_destroy(&ctx->buffer_pool->pool_mutex);
#endif
  }

  if (ctx->buffer_pool) {
    vp9_free_ref_frame_buffers(ctx->buffer_pool);
    vp9_free_internal_frame_buffers(&ctx->buffer_pool->int_frame_buffers);
  }

  vpx_free(ctx->frame_workers);
  vpx_free(ctx->buffer_pool);
  vpx_free(ctx);
  return VPX_CODEC_OK;
}

static int parse_bitdepth_colorspace_sampling(
    BITSTREAM_PROFILE profile, struct vpx_read_bit_buffer *rb) {
  vpx_color_space_t color_space;
  if (profile >= PROFILE_2)
    rb->bit_offset += 1;  // Bit-depth 10 or 12.
  color_space = (vpx_color_space_t)vpx_rb_read_literal(rb, 3);
  if (color_space != VPX_CS_SRGB) {
    rb->bit_offset += 1;  // [16,235] (including xvycc) vs [0,255] range.
    if (profile == PROFILE_1 || profile == PROFILE_3) {
      rb->bit_offset += 2;  // subsampling x/y.
      rb->bit_offset += 1;  // unused.
    }
  } else {
    if (profile == PROFILE_1 || profile == PROFILE_3) {
      rb->bit_offset += 1;  // unused
    } else {
      // RGB is only available in version 1.
      return 0;
    }
  }
  return 1;
}

static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
                                                unsigned int data_sz,
                                                vpx_codec_stream_info_t *si,
                                                int *is_intra_only,
                                                vpx_decrypt_cb decrypt_cb,
                                                void *decrypt_state) {
  int intra_only_flag = 0;
  uint8_t clear_buffer[9];

  if (data + data_sz <= data)
    return VPX_CODEC_INVALID_PARAM;

  si->is_kf = 0;
  si->w = si->h = 0;

  if (decrypt_cb) {
    data_sz = VPXMIN(sizeof(clear_buffer), data_sz);
    decrypt_cb(decrypt_state, data, clear_buffer, data_sz);
    data = clear_buffer;
  }

  {
    int show_frame;
    int error_resilient;
    struct vpx_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
    const int frame_marker = vpx_rb_read_literal(&rb, 2);
    const BITSTREAM_PROFILE profile = vp9_read_profile(&rb);

    if (frame_marker != VP9_FRAME_MARKER)
      return VPX_CODEC_UNSUP_BITSTREAM;

    if (profile >= MAX_PROFILES)
      return VPX_CODEC_UNSUP_BITSTREAM;

    if ((profile >= 2 && data_sz <= 1) || data_sz < 1)
      return VPX_CODEC_UNSUP_BITSTREAM;

    if (vpx_rb_read_bit(&rb)) {  // show an existing frame
      vpx_rb_read_literal(&rb, 3);  // Frame buffer to show.
      return VPX_CODEC_OK;
    }

    if (data_sz <= 8)
      return VPX_CODEC_UNSUP_BITSTREAM;

    si->is_kf = !vpx_rb_read_bit(&rb);
    show_frame = vpx_rb_read_bit(&rb);
    error_resilient = vpx_rb_read_bit(&rb);

    if (si->is_kf) {
      if (!vp9_read_sync_code(&rb))
        return VPX_CODEC_UNSUP_BITSTREAM;

      if (!parse_bitdepth_colorspace_sampling(profile, &rb))
        return VPX_CODEC_UNSUP_BITSTREAM;
      vp9_read_frame_size(&rb, (int *)&si->w, (int *)&si->h);
    } else {
      intra_only_flag = show_frame ? 0 : vpx_rb_read_bit(&rb);

      rb.bit_offset += error_resilient ? 0 : 2;  // reset_frame_context

      if (intra_only_flag) {
        if (!vp9_read_sync_code(&rb))
          return VPX_CODEC_UNSUP_BITSTREAM;
        if (profile > PROFILE_0) {
          if (!parse_bitdepth_colorspace_sampling(profile, &rb))
            return VPX_CODEC_UNSUP_BITSTREAM;
        }
        rb.bit_offset += REF_FRAMES;  // refresh_frame_flags
        vp9_read_frame_size(&rb, (int *)&si->w, (int *)&si->h);
      }
    }
  }
  if (is_intra_only != NULL)
    *is_intra_only = intra_only_flag;
  return VPX_CODEC_OK;
}

static vpx_codec_err_t decoder_peek_si(const uint8_t *data,
                                       unsigned int data_sz,
                                       vpx_codec_stream_info_t *si) {
  return decoder_peek_si_internal(data, data_sz, si, NULL, NULL, NULL);
}

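// A sketch of how an application might reach decoder_peek_si() through the
// generic entry point in vpx/vpx_decoder.h to learn the coded size before
// initializing a decoder ("buf"/"buf_sz" stand in for the caller's data):
//
//   vpx_codec_stream_info_t si;
//   si.sz = sizeof(si);
//   if (vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), buf, buf_sz, &si) ==
//       VPX_CODEC_OK) {
//     // si.w / si.h hold the frame size, si.is_kf the keyframe flag.
//   }
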
static vpx_codec_err_t decoder_get_si(vpx_codec_alg_priv_t *ctx,
                                      vpx_codec_stream_info_t *si) {
  const size_t sz = (si->sz >= sizeof(vp9_stream_info_t))
                        ? sizeof(vp9_stream_info_t)
                        : sizeof(vpx_codec_stream_info_t);
  memcpy(si, &ctx->si, sz);
  si->sz = (unsigned int)sz;

  return VPX_CODEC_OK;
}

static void set_error_detail(vpx_codec_alg_priv_t *ctx,
                             const char *const error) {
  ctx->base.err_detail = error;
}

static vpx_codec_err_t update_error_state(vpx_codec_alg_priv_t *ctx,
    const struct vpx_internal_error_info *error) {
  if (error->error_code)
    set_error_detail(ctx, error->has_detail ? error->detail : NULL);

  return error->error_code;
}

static void init_buffer_callbacks(vpx_codec_alg_priv_t *ctx) {
  int i;

  for (i = 0; i < ctx->num_frame_workers; ++i) {
    VPxWorker *const worker = &ctx->frame_workers[i];
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    VP9_COMMON *const cm = &frame_worker_data->pbi->common;
    BufferPool *const pool = cm->buffer_pool;

    cm->new_fb_idx = INVALID_IDX;
    cm->byte_alignment = ctx->byte_alignment;
    cm->skip_loop_filter = ctx->skip_loop_filter;

    if (ctx->get_ext_fb_cb != NULL && ctx->release_ext_fb_cb != NULL) {
      pool->get_fb_cb = ctx->get_ext_fb_cb;
      pool->release_fb_cb = ctx->release_ext_fb_cb;
      pool->cb_priv = ctx->ext_priv;
    } else {
      pool->get_fb_cb = vp9_get_frame_buffer;
      pool->release_fb_cb = vp9_release_frame_buffer;

      if (vp9_alloc_internal_frame_buffers(&pool->int_frame_buffers))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to initialize internal frame buffers");

      pool->cb_priv = &pool->int_frame_buffers;
    }
  }
}

static void set_default_ppflags(vp8_postproc_cfg_t *cfg) {
  cfg->post_proc_flag = VP8_DEBLOCK | VP8_DEMACROBLOCK;
  cfg->deblocking_level = 4;
  cfg->noise_level = 0;
}

static void set_ppflags(const vpx_codec_alg_priv_t *ctx,
                        vp9_ppflags_t *flags) {
  flags->post_proc_flag = ctx->postproc_cfg.post_proc_flag;

  flags->deblocking_level = ctx->postproc_cfg.deblocking_level;
  flags->noise_level = ctx->postproc_cfg.noise_level;
}

static int frame_worker_hook(void *arg1, void *arg2) {
  FrameWorkerData *const frame_worker_data = (FrameWorkerData *)arg1;
  const uint8_t *data = frame_worker_data->data;
  (void)arg2;

  frame_worker_data->result =
      vp9_receive_compressed_data(frame_worker_data->pbi,
                                  frame_worker_data->data_size,
                                  &data);
  frame_worker_data->data_end = data;

  if (frame_worker_data->pbi->frame_parallel_decode) {
    // In frame parallel decoding, a worker thread must successfully decode all
    // the compressed data.
    if (frame_worker_data->result != 0 ||
        frame_worker_data->data + frame_worker_data->data_size - 1 > data) {
      VPxWorker *const worker = frame_worker_data->pbi->frame_worker_owner;
      BufferPool *const pool = frame_worker_data->pbi->common.buffer_pool;
      // Signal all the other threads that are waiting for this frame.
      vp9_frameworker_lock_stats(worker);
      frame_worker_data->frame_context_ready = 1;
      lock_buffer_pool(pool);
      frame_worker_data->pbi->cur_buf->buf.corrupted = 1;
      unlock_buffer_pool(pool);
      frame_worker_data->pbi->need_resync = 1;
      vp9_frameworker_signal_stats(worker);
      vp9_frameworker_unlock_stats(worker);
      return 0;
    }
  } else if (frame_worker_data->result != 0) {
    // Check decode result in serial decode.
    frame_worker_data->pbi->cur_buf->buf.corrupted = 1;
    frame_worker_data->pbi->need_resync = 1;
  }
  return !frame_worker_data->result;
}

static vpx_codec_err_t init_decoder(vpx_codec_alg_priv_t *ctx) {
  int i;
  const VPxWorkerInterface *const winterface = vpx_get_worker_interface();

  ctx->last_show_frame = -1;
  ctx->next_submit_worker_id = 0;
  ctx->last_submit_worker_id = 0;
  ctx->next_output_worker_id = 0;
  ctx->frame_cache_read = 0;
  ctx->frame_cache_write = 0;
  ctx->num_cache_frames = 0;
  ctx->need_resync = 1;
  ctx->num_frame_workers =
      (ctx->frame_parallel_decode == 1) ? ctx->cfg.threads : 1;
  if (ctx->num_frame_workers > MAX_DECODE_THREADS)
    ctx->num_frame_workers = MAX_DECODE_THREADS;
  ctx->available_threads = ctx->num_frame_workers;
  ctx->flushed = 0;

  ctx->buffer_pool = (BufferPool *)vpx_calloc(1, sizeof(BufferPool));
  if (ctx->buffer_pool == NULL)
    return VPX_CODEC_MEM_ERROR;

#if CONFIG_MULTITHREAD
  if (pthread_mutex_init(&ctx->buffer_pool->pool_mutex, NULL)) {
    set_error_detail(ctx, "Failed to allocate buffer pool mutex");
    return VPX_CODEC_MEM_ERROR;
  }
#endif

  ctx->frame_workers = (VPxWorker *)
      vpx_malloc(ctx->num_frame_workers * sizeof(*ctx->frame_workers));
  if (ctx->frame_workers == NULL) {
    set_error_detail(ctx, "Failed to allocate frame_workers");
    return VPX_CODEC_MEM_ERROR;
  }

  for (i = 0; i < ctx->num_frame_workers; ++i) {
    VPxWorker *const worker = &ctx->frame_workers[i];
    FrameWorkerData *frame_worker_data = NULL;
    winterface->init(worker);
    worker->data1 = vpx_memalign(32, sizeof(FrameWorkerData));
    if (worker->data1 == NULL) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data");
      return VPX_CODEC_MEM_ERROR;
    }
    frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->pbi = vp9_decoder_create(ctx->buffer_pool);
    if (frame_worker_data->pbi == NULL) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data");
      return VPX_CODEC_MEM_ERROR;
    }
    frame_worker_data->pbi->frame_worker_owner = worker;
    frame_worker_data->worker_id = i;
    frame_worker_data->scratch_buffer = NULL;
    frame_worker_data->scratch_buffer_size = 0;
    frame_worker_data->frame_context_ready = 0;
    frame_worker_data->received_frame = 0;
#if CONFIG_MULTITHREAD
    if (pthread_mutex_init(&frame_worker_data->stats_mutex, NULL)) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data mutex");
      return VPX_CODEC_MEM_ERROR;
    }

    if (pthread_cond_init(&frame_worker_data->stats_cond, NULL)) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data cond");
      return VPX_CODEC_MEM_ERROR;
    }
#endif
    // If decoding in serial mode, FrameWorker thread could create tile worker
    // thread or loopfilter thread.
    frame_worker_data->pbi->max_threads =
        (ctx->frame_parallel_decode == 0) ? ctx->cfg.threads : 0;

    frame_worker_data->pbi->inv_tile_order = ctx->invert_tile_order;
    frame_worker_data->pbi->frame_parallel_decode = ctx->frame_parallel_decode;
    frame_worker_data->pbi->common.frame_parallel_decode =
        ctx->frame_parallel_decode;
    worker->hook = (VPxWorkerHook)frame_worker_hook;
    if (!winterface->reset(worker)) {
      set_error_detail(ctx, "Frame Worker thread creation failed");
      return VPX_CODEC_MEM_ERROR;
    }
  }

  // If postprocessing was enabled by the application and a
  // configuration has not been provided, default it.
  if (!ctx->postproc_cfg_set &&
      (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
    set_default_ppflags(&ctx->postproc_cfg);

  init_buffer_callbacks(ctx);

  return VPX_CODEC_OK;
}

static INLINE void check_resync(vpx_codec_alg_priv_t *const ctx,
                                const VP9Decoder *const pbi) {
  // Clear resync flag if worker got a key frame or intra only frame.
  if (ctx->need_resync == 1 && pbi->need_resync == 0 &&
      (pbi->common.intra_only || pbi->common.frame_type == KEY_FRAME))
    ctx->need_resync = 0;
}

static vpx_codec_err_t decode_one(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t **data, unsigned int data_sz,
                                  void *user_priv, int64_t deadline) {
  const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
  (void)deadline;

  // Determine the stream parameters. Note that we rely on peek_si to
  // validate that we have a buffer that does not wrap around the top
  // of the heap.
  if (!ctx->si.h) {
    int is_intra_only = 0;
    const vpx_codec_err_t res =
        decoder_peek_si_internal(*data, data_sz, &ctx->si, &is_intra_only,
                                 ctx->decrypt_cb, ctx->decrypt_state);
    if (res != VPX_CODEC_OK)
      return res;

    if (!ctx->si.is_kf && !is_intra_only)
      return VPX_CODEC_ERROR;
  }

  if (!ctx->frame_parallel_decode) {
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->data = *data;
    frame_worker_data->data_size = data_sz;
    frame_worker_data->user_priv = user_priv;
    frame_worker_data->received_frame = 1;

    // Set these even if already initialized. The caller may have changed the
    // decrypt config between frames.
    frame_worker_data->pbi->decrypt_cb = ctx->decrypt_cb;
    frame_worker_data->pbi->decrypt_state = ctx->decrypt_state;

    worker->had_error = 0;
    winterface->execute(worker);

    // Update data pointer after decode.
    *data = frame_worker_data->data_end;

    if (worker->had_error)
      return update_error_state(ctx, &frame_worker_data->pbi->common.error);

    check_resync(ctx, frame_worker_data->pbi);
  } else {
    VPxWorker *const worker = &ctx->frame_workers[ctx->next_submit_worker_id];
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    // Copy context from last worker thread to next worker thread.
    if (ctx->next_submit_worker_id != ctx->last_submit_worker_id)
      vp9_frameworker_copy_context(
          &ctx->frame_workers[ctx->next_submit_worker_id],
          &ctx->frame_workers[ctx->last_submit_worker_id]);

    frame_worker_data->pbi->ready_for_new_data = 0;
    // Copy the compressed data into worker's internal buffer.
    // TODO(hkuang): Would it be better to have every worker allocate a buffer
    // the size of the first intra frame? That would avoid repeated
    // deallocation and reallocation.
    if (frame_worker_data->scratch_buffer_size < data_sz) {
      frame_worker_data->scratch_buffer =
          (uint8_t *)vpx_realloc(frame_worker_data->scratch_buffer, data_sz);
      if (frame_worker_data->scratch_buffer == NULL) {
        set_error_detail(ctx, "Failed to reallocate scratch buffer");
        return VPX_CODEC_MEM_ERROR;
      }
      frame_worker_data->scratch_buffer_size = data_sz;
    }
    frame_worker_data->data_size = data_sz;
    memcpy(frame_worker_data->scratch_buffer, *data, data_sz);

    frame_worker_data->frame_decoded = 0;
    frame_worker_data->frame_context_ready = 0;
    frame_worker_data->received_frame = 1;
    frame_worker_data->data = frame_worker_data->scratch_buffer;
    frame_worker_data->user_priv = user_priv;

    if (ctx->next_submit_worker_id != ctx->last_submit_worker_id)
      ctx->last_submit_worker_id =
          (ctx->last_submit_worker_id + 1) % ctx->num_frame_workers;

    ctx->next_submit_worker_id =
        (ctx->next_submit_worker_id + 1) % ctx->num_frame_workers;
    --ctx->available_threads;
    worker->had_error = 0;
    winterface->launch(worker);
  }

  return VPX_CODEC_OK;
}

static void wait_worker_and_cache_frame(vpx_codec_alg_priv_t *ctx) {
  YV12_BUFFER_CONFIG sd;
  vp9_ppflags_t flags = {0, 0, 0};
  const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
  VPxWorker *const worker = &ctx->frame_workers[ctx->next_output_worker_id];
  FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
  ctx->next_output_worker_id =
      (ctx->next_output_worker_id + 1) % ctx->num_frame_workers;
  // TODO(hkuang): Add worker error handling here.
  winterface->sync(worker);
  frame_worker_data->received_frame = 0;
  ++ctx->available_threads;

  check_resync(ctx, frame_worker_data->pbi);

  if (vp9_get_raw_frame(frame_worker_data->pbi, &sd, &flags) == 0) {
    VP9_COMMON *const cm = &frame_worker_data->pbi->common;
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    ctx->frame_cache[ctx->frame_cache_write].fb_idx = cm->new_fb_idx;
    yuvconfig2image(&ctx->frame_cache[ctx->frame_cache_write].img, &sd,
                    frame_worker_data->user_priv);
    ctx->frame_cache[ctx->frame_cache_write].img.fb_priv =
        frame_bufs[cm->new_fb_idx].raw_frame_buffer.priv;
    ctx->frame_cache_write =
        (ctx->frame_cache_write + 1) % FRAME_CACHE_SIZE;
    ++ctx->num_cache_frames;
  }
}

static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
                                      const uint8_t *data, unsigned int data_sz,
                                      void *user_priv, long deadline) {
  const uint8_t *data_start = data;
  const uint8_t *const data_end = data + data_sz;
  vpx_codec_err_t res;
  uint32_t frame_sizes[8];
  int frame_count;

  if (data == NULL && data_sz == 0) {
    ctx->flushed = 1;
    return VPX_CODEC_OK;
  }

  // Reset flushed when receiving a valid frame.
  ctx->flushed = 0;

  // Initialize the decoder workers on the first frame.
  if (ctx->frame_workers == NULL) {
    const vpx_codec_err_t res = init_decoder(ctx);
    if (res != VPX_CODEC_OK)
      return res;
  }

  res = vp9_parse_superframe_index(data, data_sz, frame_sizes, &frame_count,
                                   ctx->decrypt_cb, ctx->decrypt_state);
  if (res != VPX_CODEC_OK)
    return res;

  if (ctx->frame_parallel_decode) {
    // Decode in frame parallel mode. When decoding in this mode, the frame
    // passed to the decoder must be either a normal frame or a superframe with
    // superframe index so the decoder could get each frame's start position
    // in the superframe.
    if (frame_count > 0) {
      int i;

      for (i = 0; i < frame_count; ++i) {
        const uint8_t *data_start_copy = data_start;
        const uint32_t frame_size = frame_sizes[i];
        if (data_start < data ||
            frame_size > (uint32_t)(data_end - data_start)) {
          set_error_detail(ctx, "Invalid frame size in index");
          return VPX_CODEC_CORRUPT_FRAME;
        }

        if (ctx->available_threads == 0) {
          // No more threads for decoding. Wait until the next output worker
          // finishes decoding. Then copy the decoded frame into cache.
          if (ctx->num_cache_frames < FRAME_CACHE_SIZE) {
            wait_worker_and_cache_frame(ctx);
          } else {
            // TODO(hkuang): Add unit test to test this path.
            set_error_detail(ctx, "Frame output cache is full.");
            return VPX_CODEC_ERROR;
          }
        }

        res = decode_one(ctx, &data_start_copy, frame_size, user_priv,
                         deadline);
        if (res != VPX_CODEC_OK)
          return res;
        data_start += frame_size;
      }
    } else {
      if (ctx->available_threads == 0) {
        // No more threads for decoding. Wait until the next output worker
        // finishes decoding. Then copy the decoded frame into cache.
        if (ctx->num_cache_frames < FRAME_CACHE_SIZE) {
          wait_worker_and_cache_frame(ctx);
        } else {
          // TODO(hkuang): Add unit test to test this path.
          set_error_detail(ctx, "Frame output cache is full.");
          return VPX_CODEC_ERROR;
        }
      }

      res = decode_one(ctx, &data, data_sz, user_priv, deadline);
      if (res != VPX_CODEC_OK)
        return res;
    }
  } else {
    // Decode in serial mode.
    if (frame_count > 0) {
      int i;

      for (i = 0; i < frame_count; ++i) {
        const uint8_t *data_start_copy = data_start;
        const uint32_t frame_size = frame_sizes[i];
        vpx_codec_err_t res;
        if (data_start < data ||
            frame_size > (uint32_t)(data_end - data_start)) {
          set_error_detail(ctx, "Invalid frame size in index");
          return VPX_CODEC_CORRUPT_FRAME;
        }

        res = decode_one(ctx, &data_start_copy, frame_size, user_priv,
                         deadline);
        if (res != VPX_CODEC_OK)
          return res;

        data_start += frame_size;
      }
    } else {
      while (data_start < data_end) {
        const uint32_t frame_size = (uint32_t)(data_end - data_start);
        const vpx_codec_err_t res = decode_one(ctx, &data_start, frame_size,
                                               user_priv, deadline);
        if (res != VPX_CODEC_OK)
          return res;

        // Account for suboptimal termination by the encoder.
        while (data_start < data_end) {
          const uint8_t marker = read_marker(ctx->decrypt_cb,
                                             ctx->decrypt_state, data_start);
          if (marker)
            break;
          ++data_start;
        }
      }
    }
  }

  return res;
}

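// Note on flushing: as decoder_decode() above shows, passing data == NULL and
// data_sz == 0 marks the decoder as flushed, which lets decoder_get_frame()
// drain frames still in flight in frame parallel mode. A minimal sketch of how
// a caller might drive this through the public API, assuming a vpx_codec_ctx_t
// named "codec" that was initialized with this interface:
//
//   vpx_codec_iter_t iter = NULL;
//   vpx_image_t *img;
//   vpx_codec_decode(&codec, NULL, 0, NULL, 0);  // signal end of stream
//   while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
//     // consume the remaining decoded frames
//   }
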
static void release_last_output_frame(vpx_codec_alg_priv_t *ctx) {
  RefCntBuffer *const frame_bufs = ctx->buffer_pool->frame_bufs;
  // Decrease reference count of last output frame in frame parallel mode.
  if (ctx->frame_parallel_decode && ctx->last_show_frame >= 0) {
    BufferPool *const pool = ctx->buffer_pool;
    lock_buffer_pool(pool);
    decrease_ref_count(ctx->last_show_frame, frame_bufs, pool);
    unlock_buffer_pool(pool);
  }
}

static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
                                      vpx_codec_iter_t *iter) {
  vpx_image_t *img = NULL;

  // Only return a frame when all the threads are busy or the application has
  // flushed the decoder in frame parallel decode.
  if (ctx->frame_parallel_decode && ctx->available_threads > 0 &&
      !ctx->flushed) {
    return NULL;
  }

  // Output the frames in the cache first.
  if (ctx->num_cache_frames > 0) {
    release_last_output_frame(ctx);
    ctx->last_show_frame = ctx->frame_cache[ctx->frame_cache_read].fb_idx;
    if (ctx->need_resync)
      return NULL;
    img = &ctx->frame_cache[ctx->frame_cache_read].img;
    ctx->frame_cache_read = (ctx->frame_cache_read + 1) % FRAME_CACHE_SIZE;
    --ctx->num_cache_frames;
    return img;
  }

  // iter acts as a flip flop, so an image is only returned on the first
  // call to get_frame.
  if (*iter == NULL && ctx->frame_workers != NULL) {
    do {
      YV12_BUFFER_CONFIG sd;
      vp9_ppflags_t flags = {0, 0, 0};
      const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
      VPxWorker *const worker =
          &ctx->frame_workers[ctx->next_output_worker_id];
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      ctx->next_output_worker_id =
          (ctx->next_output_worker_id + 1) % ctx->num_frame_workers;
      if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        set_ppflags(ctx, &flags);
      // Wait for the frame from worker thread.
      if (winterface->sync(worker)) {
        // Check if worker has received any frames.
        if (frame_worker_data->received_frame == 1) {
          ++ctx->available_threads;
          frame_worker_data->received_frame = 0;
          check_resync(ctx, frame_worker_data->pbi);
        }
        if (vp9_get_raw_frame(frame_worker_data->pbi, &sd, &flags) == 0) {
          VP9_COMMON *const cm = &frame_worker_data->pbi->common;
          RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
          release_last_output_frame(ctx);
          ctx->last_show_frame = frame_worker_data->pbi->common.new_fb_idx;
          if (ctx->need_resync)
            return NULL;
          yuvconfig2image(&ctx->img, &sd, frame_worker_data->user_priv);
          ctx->img.fb_priv = frame_bufs[cm->new_fb_idx].raw_frame_buffer.priv;
          img = &ctx->img;
          return img;
        }
      } else {
        // Decoding failed. Release the worker thread.
        frame_worker_data->received_frame = 0;
        ++ctx->available_threads;
        ctx->need_resync = 1;
        if (ctx->flushed != 1)
          return NULL;
      }
    } while (ctx->next_output_worker_id != ctx->next_submit_worker_id);
  }
  return NULL;
}

static vpx_codec_err_t decoder_set_fb_fn(
    vpx_codec_alg_priv_t *ctx,
    vpx_get_frame_buffer_cb_fn_t cb_get,
    vpx_release_frame_buffer_cb_fn_t cb_release, void *cb_priv) {
  if (cb_get == NULL || cb_release == NULL) {
    return VPX_CODEC_INVALID_PARAM;
  } else if (ctx->frame_workers == NULL) {
    // The frame buffer functions may only be changed before the decoder has
    // been initialized; once the frame workers exist, changes are rejected.
    ctx->get_ext_fb_cb = cb_get;
    ctx->release_ext_fb_cb = cb_release;
    ctx->ext_priv = cb_priv;
    return VPX_CODEC_OK;
  }

  return VPX_CODEC_ERROR;
}

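// A sketch of how an application might supply its own frame buffers through
// the generic wrapper in vpx/vpx_decoder.h, which routes to decoder_set_fb_fn()
// above. The callback names are placeholders; the call has to happen before
// the first vpx_codec_decode(), i.e. before the frame workers are created:
//
//   vpx_codec_set_frame_buffer_functions(&codec, my_get_frame_buffer,
//                                        my_release_frame_buffer, my_priv);
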
static vpx_codec_err_t ctrl_set_reference(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_ref_frame_t *const data = va_arg(args, vpx_ref_frame_t *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (data) {
    vpx_ref_frame_t *const frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    image2yuvconfig(&frame->img, &sd);
    return vp9_set_reference_dec(&frame_worker_data->pbi->common,
                                 (VP9_REFFRAME)frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t ctrl_copy_reference(vpx_codec_alg_priv_t *ctx,
                                           va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    image2yuvconfig(&frame->img, &sd);
    return vp9_copy_reference_dec(frame_worker_data->pbi,
                                  (VP9_REFFRAME)frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t ctrl_get_reference(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vp9_ref_frame_t *data = va_arg(args, vp9_ref_frame_t *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (data) {
    YV12_BUFFER_CONFIG *fb;
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    fb = get_ref_frame(&frame_worker_data->pbi->common, data->idx);
    if (fb == NULL) return VPX_CODEC_ERROR;
    yuvconfig2image(&data->img, fb, NULL);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t ctrl_set_postproc(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
#if CONFIG_VP9_POSTPROC
  vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);

  if (data) {
    ctx->postproc_cfg_set = 1;
    ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
#else
  (void)ctx;
  (void)args;
  return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t ctrl_set_dbg_options(vpx_codec_alg_priv_t *ctx,
                                            va_list args) {
  (void)ctx;
  (void)args;
  return VPX_CODEC_INCAPABLE;
}

static vpx_codec_err_t ctrl_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                                 va_list args) {
  int *const update_info = va_arg(args, int *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (update_info) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      *update_info = frame_worker_data->pbi->refresh_frame_flags;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                                va_list args) {
  int *corrupted = va_arg(args, int *);

  if (corrupted) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      RefCntBuffer *const frame_bufs =
          frame_worker_data->pbi->common.buffer_pool->frame_bufs;
      if (frame_worker_data->pbi->common.frame_to_show == NULL)
        return VPX_CODEC_ERROR;
      if (ctx->last_show_frame >= 0)
        *corrupted = frame_bufs[ctx->last_show_frame].buf.corrupted;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_frame_size(vpx_codec_alg_priv_t *ctx,
                                           va_list args) {
  int *const frame_size = va_arg(args, int *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (frame_size) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      const VP9_COMMON *const cm = &frame_worker_data->pbi->common;
      frame_size[0] = cm->width;
      frame_size[1] = cm->height;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_render_size(vpx_codec_alg_priv_t *ctx,
                                            va_list args) {
  int *const render_size = va_arg(args, int *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (render_size) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      const VP9_COMMON *const cm = &frame_worker_data->pbi->common;
      render_size[0] = cm->render_width;
      render_size[1] = cm->render_height;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_bit_depth(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  unsigned int *const bit_depth = va_arg(args, unsigned int *);
  VPxWorker *const worker = &ctx->frame_workers[ctx->next_output_worker_id];

  if (bit_depth) {
    if (worker) {
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      const VP9_COMMON *const cm = &frame_worker_data->pbi->common;
      *bit_depth = cm->bit_depth;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_set_invert_tile_order(vpx_codec_alg_priv_t *ctx,
                                                  va_list args) {
  ctx->invert_tile_order = va_arg(args, int);
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_decryptor(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_decrypt_init *init = va_arg(args, vpx_decrypt_init *);
  ctx->decrypt_cb = init ? init->decrypt_cb : NULL;
  ctx->decrypt_state = init ? init->decrypt_state : NULL;
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_byte_alignment(vpx_codec_alg_priv_t *ctx,
                                               va_list args) {
  const int legacy_byte_alignment = 0;
  const int min_byte_alignment = 32;
  const int max_byte_alignment = 1024;
  const int byte_alignment = va_arg(args, int);

  if (byte_alignment != legacy_byte_alignment &&
      (byte_alignment < min_byte_alignment ||
       byte_alignment > max_byte_alignment ||
       (byte_alignment & (byte_alignment - 1)) != 0))
    return VPX_CODEC_INVALID_PARAM;

  ctx->byte_alignment = byte_alignment;
  if (ctx->frame_workers) {
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data =
        (FrameWorkerData *)worker->data1;
    frame_worker_data->pbi->common.byte_alignment = byte_alignment;
  }
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_skip_loop_filter(vpx_codec_alg_priv_t *ctx,
                                                 va_list args) {
  ctx->skip_loop_filter = va_arg(args, int);

  if (ctx->frame_workers) {
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->pbi->common.skip_loop_filter = ctx->skip_loop_filter;
  }

  return VPX_CODEC_OK;
}

static vpx_codec_ctrl_fn_map_t decoder_ctrl_maps[] = {
  {VP8_COPY_REFERENCE,            ctrl_copy_reference},

  // Setters
  {VP8_SET_REFERENCE,             ctrl_set_reference},
  {VP8_SET_POSTPROC,              ctrl_set_postproc},
  {VP8_SET_DBG_COLOR_REF_FRAME,   ctrl_set_dbg_options},
  {VP8_SET_DBG_COLOR_MB_MODES,    ctrl_set_dbg_options},
  {VP8_SET_DBG_COLOR_B_MODES,     ctrl_set_dbg_options},
  {VP8_SET_DBG_DISPLAY_MV,        ctrl_set_dbg_options},
  {VP9_INVERT_TILE_DECODE_ORDER,  ctrl_set_invert_tile_order},
  {VPXD_SET_DECRYPTOR,            ctrl_set_decryptor},
  {VP9_SET_BYTE_ALIGNMENT,        ctrl_set_byte_alignment},
  {VP9_SET_SKIP_LOOP_FILTER,      ctrl_set_skip_loop_filter},

  // Getters
  {VP8D_GET_LAST_REF_UPDATES,     ctrl_get_last_ref_updates},
  {VP8D_GET_FRAME_CORRUPTED,      ctrl_get_frame_corrupted},
  {VP9_GET_REFERENCE,             ctrl_get_reference},
  {VP9D_GET_DISPLAY_SIZE,         ctrl_get_render_size},
  {VP9D_GET_BIT_DEPTH,            ctrl_get_bit_depth},
  {VP9D_GET_FRAME_SIZE,           ctrl_get_frame_size},

  {-1,                            NULL},
};

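// A sketch of how these controls are typically exercised from application
// code, assuming an initialized vpx_codec_ctx_t named "codec"; the control IDs
// are the ones registered in decoder_ctrl_maps above:
//
//   unsigned int bit_depth = 0;
//   int corrupted = 0;
//   vpx_codec_control(&codec, VP9D_GET_BIT_DEPTH, &bit_depth);
//   vpx_codec_control(&codec, VP8D_GET_FRAME_CORRUPTED, &corrupted);
//   vpx_codec_control(&codec, VP9_SET_SKIP_LOOP_FILTER, 1);
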
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp9_dx) = {
  "WebM Project VP9 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_DECODER | VP9_CAP_POSTPROC |
      VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER,  // vpx_codec_caps_t
  decoder_init,       // vpx_codec_init_fn_t
  decoder_destroy,    // vpx_codec_destroy_fn_t
  decoder_ctrl_maps,  // vpx_codec_ctrl_fn_map_t
  {  // NOLINT
    decoder_peek_si,    // vpx_codec_peek_si_fn_t
    decoder_get_si,     // vpx_codec_get_si_fn_t
    decoder_decode,     // vpx_codec_decode_fn_t
    decoder_get_frame,  // vpx_codec_frame_get_fn_t
    decoder_set_fb_fn,  // vpx_codec_set_fb_fn_t
  },
  {  // NOLINT
    0,
    NULL,  // vpx_codec_enc_cfg_map_t
    NULL,  // vpx_codec_encode_fn_t
    NULL,  // vpx_codec_get_cx_data_fn_t
    NULL,  // vpx_codec_enc_config_set_fn_t
    NULL,  // vpx_codec_get_global_headers_fn_t
    NULL,  // vpx_codec_get_preview_frame_fn_t
    NULL   // vpx_codec_enc_mr_get_mem_loc_fn_t
  }
};
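
// A minimal sketch of a decode loop against this interface, using the public
// API from vpx/vpx_decoder.h and vpx/vp8dx.h. Error handling is omitted and
// read_next_frame()/show_image() are placeholders for whatever container
// parsing and display the application uses:
//
//   vpx_codec_ctx_t codec;
//   vpx_codec_dec_cfg_t cfg = { 0 };
//   cfg.threads = 4;
//   vpx_codec_dec_init(&codec, vpx_codec_vp9_dx(), &cfg, 0);
//   while (read_next_frame(&buf, &buf_sz)) {  // placeholder helper
//     vpx_codec_iter_t iter = NULL;
//     vpx_image_t *img;
//     vpx_codec_decode(&codec, buf, (unsigned int)buf_sz, NULL, 0);
//     while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL)
//       show_image(img);  // placeholder helper
//   }
//   vpx_codec_destroy(&codec);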