/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <stdlib.h>
#include <string.h>

#include "./vpx_config.h"
#include "./vpx_version.h"

#include "vpx/internal/vpx_codec_internal.h"
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"
#include "vpx_dsp/bitreader_buffer.h"
#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_util/vpx_thread.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_frame_buffers.h"

#include "vp9/decoder/vp9_decodeframe.h"

#include "vp9/vp9_dx_iface.h"
#include "vp9/vp9_iface_common.h"

#define VP9_CAP_POSTPROC (CONFIG_VP9_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)

static vpx_codec_err_t decoder_init(vpx_codec_ctx_t *ctx,
                                    vpx_codec_priv_enc_mr_cfg_t *data) {
  // This function only allocates space for the vpx_codec_alg_priv_t
  // structure. More memory may be required at the time the stream
  // information becomes known.
  (void)data;

  if (!ctx->priv) {
    vpx_codec_alg_priv_t *const priv =
        (vpx_codec_alg_priv_t *)vpx_calloc(1, sizeof(*priv));
    if (priv == NULL) return VPX_CODEC_MEM_ERROR;

    ctx->priv = (vpx_codec_priv_t *)priv;
    ctx->priv->init_flags = ctx->init_flags;
    priv->si.sz = sizeof(priv->si);
    priv->flushed = 0;
    // TODO(jzern): remnants of frame-level parallel decoding should be
    // removed. cf., https://bugs.chromium.org/p/webm/issues/detail?id=1395
    priv->frame_parallel_decode = 0;
    if (ctx->config.dec) {
      priv->cfg = *ctx->config.dec;
      ctx->config.dec = &priv->cfg;
    }
  }

  return VPX_CODEC_OK;
}

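// Shuts down every worker thread, then releases each worker's decoder
// instance and scratch buffer before freeing the shared buffer pool.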
static vpx_codec_err_t decoder_destroy(vpx_codec_alg_priv_t *ctx) {
  if (ctx->frame_workers != NULL) {
    int i;
    // Shutdown all threads before reclaiming any memory. The frame-level
    // parallel decoder may access data from another worker.
    for (i = 0; i < ctx->num_frame_workers; ++i) {
      VPxWorker *const worker = &ctx->frame_workers[i];
      vpx_get_worker_interface()->end(worker);
    }
    for (i = 0; i < ctx->num_frame_workers; ++i) {
      VPxWorker *const worker = &ctx->frame_workers[i];
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      vp9_remove_common(&frame_worker_data->pbi->common);
#if CONFIG_VP9_POSTPROC
      vp9_free_postproc_buffers(&frame_worker_data->pbi->common);
#endif
      vp9_decoder_remove(frame_worker_data->pbi);
      vpx_free(frame_worker_data->scratch_buffer);
#if CONFIG_MULTITHREAD
      pthread_mutex_destroy(&frame_worker_data->stats_mutex);
      pthread_cond_destroy(&frame_worker_data->stats_cond);
#endif
      vpx_free(frame_worker_data);
    }
#if CONFIG_MULTITHREAD
    pthread_mutex_destroy(&ctx->buffer_pool->pool_mutex);
#endif
  }

  if (ctx->buffer_pool) {
    vp9_free_ref_frame_buffers(ctx->buffer_pool);
    vp9_free_internal_frame_buffers(&ctx->buffer_pool->int_frame_buffers);
  }

  vpx_free(ctx->frame_workers);
  vpx_free(ctx->buffer_pool);
  vpx_free(ctx);
  return VPX_CODEC_OK;
}

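// Skips over the bit-depth, color-space and subsampling fields of the
// uncompressed header without recording their values. Returns 0 if the
// color-space/profile combination is invalid, 1 otherwise.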
static int parse_bitdepth_colorspace_sampling(BITSTREAM_PROFILE profile,
                                              struct vpx_read_bit_buffer *rb) {
  vpx_color_space_t color_space;
  if (profile >= PROFILE_2) rb->bit_offset += 1;  // Bit-depth 10 or 12.
  color_space = (vpx_color_space_t)vpx_rb_read_literal(rb, 3);
  if (color_space != VPX_CS_SRGB) {
    rb->bit_offset += 1;  // [16,235] (including xvycc) vs [0,255] range.
    if (profile == PROFILE_1 || profile == PROFILE_3) {
      rb->bit_offset += 2;  // subsampling x/y.
      rb->bit_offset += 1;  // unused.
    }
  } else {
    if (profile == PROFILE_1 || profile == PROFILE_3) {
      rb->bit_offset += 1;  // unused
    } else {
      // SRGB (RGB) is only available in profiles 1 and 3.
      return 0;
    }
  }
  return 1;
}

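// Parses just enough of the uncompressed frame header (at most 10 bytes,
// decrypted into a local buffer when a decrypt callback is provided) to fill
// in the stream info and report whether the frame is intra-only.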
static vpx_codec_err_t decoder_peek_si_internal(
    const uint8_t *data, unsigned int data_sz, vpx_codec_stream_info_t *si,
    int *is_intra_only, vpx_decrypt_cb decrypt_cb, void *decrypt_state) {
  int intra_only_flag = 0;
  uint8_t clear_buffer[10];

  if (data + data_sz <= data) return VPX_CODEC_INVALID_PARAM;

  si->is_kf = 0;
  si->w = si->h = 0;

  if (decrypt_cb) {
    data_sz = VPXMIN(sizeof(clear_buffer), data_sz);
    decrypt_cb(decrypt_state, data, clear_buffer, data_sz);
    data = clear_buffer;
  }

  // A maximum of 6 bits are needed to read the frame marker, profile and
  // show_existing_frame.
  if (data_sz < 1) return VPX_CODEC_UNSUP_BITSTREAM;

  {
    int show_frame;
    int error_resilient;
    struct vpx_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
    const int frame_marker = vpx_rb_read_literal(&rb, 2);
    const BITSTREAM_PROFILE profile = vp9_read_profile(&rb);

    if (frame_marker != VP9_FRAME_MARKER) return VPX_CODEC_UNSUP_BITSTREAM;

    if (profile >= MAX_PROFILES) return VPX_CODEC_UNSUP_BITSTREAM;

    if (vpx_rb_read_bit(&rb)) {  // show an existing frame
      // If profile is > 2 and show_existing_frame is true, then at least 1 more
      // byte (6+3=9 bits) is needed.
      if (profile > 2 && data_sz < 2) return VPX_CODEC_UNSUP_BITSTREAM;
      vpx_rb_read_literal(&rb, 3);  // Frame buffer to show.
      return VPX_CODEC_OK;
    }

    // For the rest of the function, a maximum of 9 more bytes are needed
    // (computed by taking the maximum possible bits needed in each case). Note
    // that this has to be updated if we read any more bits in this function.
    if (data_sz < 10) return VPX_CODEC_UNSUP_BITSTREAM;

    si->is_kf = !vpx_rb_read_bit(&rb);
    show_frame = vpx_rb_read_bit(&rb);
    error_resilient = vpx_rb_read_bit(&rb);

    if (si->is_kf) {
      if (!vp9_read_sync_code(&rb)) return VPX_CODEC_UNSUP_BITSTREAM;

      if (!parse_bitdepth_colorspace_sampling(profile, &rb))
        return VPX_CODEC_UNSUP_BITSTREAM;
      vp9_read_frame_size(&rb, (int *)&si->w, (int *)&si->h);
    } else {
      intra_only_flag = show_frame ? 0 : vpx_rb_read_bit(&rb);

      rb.bit_offset += error_resilient ? 0 : 2;  // reset_frame_context

      if (intra_only_flag) {
        if (!vp9_read_sync_code(&rb)) return VPX_CODEC_UNSUP_BITSTREAM;
        if (profile > PROFILE_0) {
          if (!parse_bitdepth_colorspace_sampling(profile, &rb))
            return VPX_CODEC_UNSUP_BITSTREAM;
        }
        rb.bit_offset += REF_FRAMES;  // refresh_frame_flags
        vp9_read_frame_size(&rb, (int *)&si->w, (int *)&si->h);
      }
    }
  }
  if (is_intra_only != NULL) *is_intra_only = intra_only_flag;
  return VPX_CODEC_OK;
}

static vpx_codec_err_t decoder_peek_si(const uint8_t *data,
                                       unsigned int data_sz,
                                       vpx_codec_stream_info_t *si) {
  return decoder_peek_si_internal(data, data_sz, si, NULL, NULL, NULL);
}

static vpx_codec_err_t decoder_get_si(vpx_codec_alg_priv_t *ctx,
                                      vpx_codec_stream_info_t *si) {
  const size_t sz = (si->sz >= sizeof(vp9_stream_info_t))
                        ? sizeof(vp9_stream_info_t)
                        : sizeof(vpx_codec_stream_info_t);
  memcpy(si, &ctx->si, sz);
  si->sz = (unsigned int)sz;

  return VPX_CODEC_OK;
}

static void set_error_detail(vpx_codec_alg_priv_t *ctx,
                             const char *const error) {
  ctx->base.err_detail = error;
}

static vpx_codec_err_t update_error_state(
    vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
  if (error->error_code)
    set_error_detail(ctx, error->has_detail ? error->detail : NULL);

  return error->error_code;
}

static void init_buffer_callbacks(vpx_codec_alg_priv_t *ctx) {
  int i;

  for (i = 0; i < ctx->num_frame_workers; ++i) {
    VPxWorker *const worker = &ctx->frame_workers[i];
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    VP9_COMMON *const cm = &frame_worker_data->pbi->common;
    BufferPool *const pool = cm->buffer_pool;

    cm->new_fb_idx = INVALID_IDX;
    cm->byte_alignment = ctx->byte_alignment;
    cm->skip_loop_filter = ctx->skip_loop_filter;

    if (ctx->get_ext_fb_cb != NULL && ctx->release_ext_fb_cb != NULL) {
      pool->get_fb_cb = ctx->get_ext_fb_cb;
      pool->release_fb_cb = ctx->release_ext_fb_cb;
      pool->cb_priv = ctx->ext_priv;
    } else {
      pool->get_fb_cb = vp9_get_frame_buffer;
      pool->release_fb_cb = vp9_release_frame_buffer;

      if (vp9_alloc_internal_frame_buffers(&pool->int_frame_buffers))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to initialize internal frame buffers");

      pool->cb_priv = &pool->int_frame_buffers;
    }
  }
}

static void set_default_ppflags(vp8_postproc_cfg_t *cfg) {
  cfg->post_proc_flag = VP8_DEBLOCK | VP8_DEMACROBLOCK;
  cfg->deblocking_level = 4;
  cfg->noise_level = 0;
}

static void set_ppflags(const vpx_codec_alg_priv_t *ctx, vp9_ppflags_t *flags) {
  flags->post_proc_flag = ctx->postproc_cfg.post_proc_flag;

  flags->deblocking_level = ctx->postproc_cfg.deblocking_level;
  flags->noise_level = ctx->postproc_cfg.noise_level;
}

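// Hook executed on a frame worker thread: feeds the compressed data to the
// decoder and, on failure, marks the current frame buffer as corrupt and
// requests a resync. Returns nonzero on success, as the VPxWorker interface
// expects.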
static int frame_worker_hook(void *arg1, void *arg2) {
  FrameWorkerData *const frame_worker_data = (FrameWorkerData *)arg1;
  const uint8_t *data = frame_worker_data->data;
  (void)arg2;

  frame_worker_data->result = vp9_receive_compressed_data(
      frame_worker_data->pbi, frame_worker_data->data_size, &data);
  frame_worker_data->data_end = data;

  if (frame_worker_data->pbi->frame_parallel_decode) {
    // In frame parallel decoding, a worker thread must successfully decode all
    // the compressed data.
    if (frame_worker_data->result != 0 ||
        frame_worker_data->data + frame_worker_data->data_size - 1 > data) {
      VPxWorker *const worker = frame_worker_data->pbi->frame_worker_owner;
      BufferPool *const pool = frame_worker_data->pbi->common.buffer_pool;
      // Signal all the other threads that are waiting for this frame.
      vp9_frameworker_lock_stats(worker);
      frame_worker_data->frame_context_ready = 1;
      lock_buffer_pool(pool);
      frame_worker_data->pbi->cur_buf->buf.corrupted = 1;
      unlock_buffer_pool(pool);
      frame_worker_data->pbi->need_resync = 1;
      vp9_frameworker_signal_stats(worker);
      vp9_frameworker_unlock_stats(worker);
      return 0;
    }
  } else if (frame_worker_data->result != 0) {
    // Check decode result in serial decode.
    frame_worker_data->pbi->cur_buf->buf.corrupted = 1;
    frame_worker_data->pbi->need_resync = 1;
  }
  return !frame_worker_data->result;
}

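// Lazily allocates the buffer pool and the frame worker(s). In serial mode a
// single worker is created and any extra threads are used inside it for tile
// or loopfilter work; in frame-parallel mode one worker is created per thread,
// capped at MAX_DECODE_THREADS.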
static vpx_codec_err_t init_decoder(vpx_codec_alg_priv_t *ctx) {
  int i;
  const VPxWorkerInterface *const winterface = vpx_get_worker_interface();

  ctx->last_show_frame = -1;
  ctx->next_submit_worker_id = 0;
  ctx->last_submit_worker_id = 0;
  ctx->next_output_worker_id = 0;
  ctx->frame_cache_read = 0;
  ctx->frame_cache_write = 0;
  ctx->num_cache_frames = 0;
  ctx->need_resync = 1;
  ctx->num_frame_workers =
      (ctx->frame_parallel_decode == 1) ? ctx->cfg.threads : 1;
  if (ctx->num_frame_workers > MAX_DECODE_THREADS)
    ctx->num_frame_workers = MAX_DECODE_THREADS;
  ctx->available_threads = ctx->num_frame_workers;
  ctx->flushed = 0;

  ctx->buffer_pool = (BufferPool *)vpx_calloc(1, sizeof(BufferPool));
  if (ctx->buffer_pool == NULL) return VPX_CODEC_MEM_ERROR;

#if CONFIG_MULTITHREAD
  if (pthread_mutex_init(&ctx->buffer_pool->pool_mutex, NULL)) {
    set_error_detail(ctx, "Failed to allocate buffer pool mutex");
    return VPX_CODEC_MEM_ERROR;
  }
#endif

  ctx->frame_workers = (VPxWorker *)vpx_malloc(ctx->num_frame_workers *
                                               sizeof(*ctx->frame_workers));
  if (ctx->frame_workers == NULL) {
    set_error_detail(ctx, "Failed to allocate frame_workers");
    return VPX_CODEC_MEM_ERROR;
  }

  for (i = 0; i < ctx->num_frame_workers; ++i) {
    VPxWorker *const worker = &ctx->frame_workers[i];
    FrameWorkerData *frame_worker_data = NULL;
    winterface->init(worker);
    worker->data1 = vpx_memalign(32, sizeof(FrameWorkerData));
    if (worker->data1 == NULL) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data");
      return VPX_CODEC_MEM_ERROR;
    }
    frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->pbi = vp9_decoder_create(ctx->buffer_pool);
    if (frame_worker_data->pbi == NULL) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data");
      return VPX_CODEC_MEM_ERROR;
    }
    frame_worker_data->pbi->frame_worker_owner = worker;
    frame_worker_data->worker_id = i;
    frame_worker_data->scratch_buffer = NULL;
    frame_worker_data->scratch_buffer_size = 0;
    frame_worker_data->frame_context_ready = 0;
    frame_worker_data->received_frame = 0;
#if CONFIG_MULTITHREAD
    if (pthread_mutex_init(&frame_worker_data->stats_mutex, NULL)) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data mutex");
      return VPX_CODEC_MEM_ERROR;
    }

    if (pthread_cond_init(&frame_worker_data->stats_cond, NULL)) {
      set_error_detail(ctx, "Failed to allocate frame_worker_data cond");
      return VPX_CODEC_MEM_ERROR;
    }
#endif
    // When decoding in serial mode, the FrameWorker thread may itself create
    // tile worker threads or a loopfilter thread.
    frame_worker_data->pbi->max_threads =
        (ctx->frame_parallel_decode == 0) ? ctx->cfg.threads : 0;

    frame_worker_data->pbi->inv_tile_order = ctx->invert_tile_order;
    frame_worker_data->pbi->frame_parallel_decode = ctx->frame_parallel_decode;
    frame_worker_data->pbi->common.frame_parallel_decode =
        ctx->frame_parallel_decode;
    worker->hook = (VPxWorkerHook)frame_worker_hook;
    if (!winterface->reset(worker)) {
      set_error_detail(ctx, "Frame Worker thread creation failed");
      return VPX_CODEC_MEM_ERROR;
    }
  }

  // If postprocessing was enabled by the application and a
  // configuration has not been provided, default it.
  if (!ctx->postproc_cfg_set && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
    set_default_ppflags(&ctx->postproc_cfg);

  init_buffer_callbacks(ctx);

  return VPX_CODEC_OK;
}

static INLINE void check_resync(vpx_codec_alg_priv_t *const ctx,
                                const VP9Decoder *const pbi) {
  // Clear resync flag if worker got a key frame or intra only frame.
  if (ctx->need_resync == 1 && pbi->need_resync == 0 &&
      (pbi->common.intra_only || pbi->common.frame_type == KEY_FRAME))
    ctx->need_resync = 0;
}

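// Submits a single compressed frame to a frame worker. In serial mode the
// frame is decoded synchronously on the only worker; in frame-parallel mode
// the data is copied into the worker's scratch buffer and the worker is
// launched asynchronously.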
static vpx_codec_err_t decode_one(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t **data, unsigned int data_sz,
                                  void *user_priv, int64_t deadline) {
  const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
  (void)deadline;

  // Determine the stream parameters. Note that we rely on peek_si to
  // validate that we have a buffer that does not wrap around the top
  // of the heap.
  if (!ctx->si.h) {
    int is_intra_only = 0;
    const vpx_codec_err_t res =
        decoder_peek_si_internal(*data, data_sz, &ctx->si, &is_intra_only,
                                 ctx->decrypt_cb, ctx->decrypt_state);
    if (res != VPX_CODEC_OK) return res;

    if (!ctx->si.is_kf && !is_intra_only) return VPX_CODEC_ERROR;
  }

  if (!ctx->frame_parallel_decode) {
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->data = *data;
    frame_worker_data->data_size = data_sz;
    frame_worker_data->user_priv = user_priv;
    frame_worker_data->received_frame = 1;

    // Set these even if already initialized. The caller may have changed the
    // decrypt config between frames.
    frame_worker_data->pbi->decrypt_cb = ctx->decrypt_cb;
    frame_worker_data->pbi->decrypt_state = ctx->decrypt_state;

    worker->had_error = 0;
    winterface->execute(worker);

    // Update data pointer after decode.
    *data = frame_worker_data->data_end;

    if (worker->had_error)
      return update_error_state(ctx, &frame_worker_data->pbi->common.error);

    check_resync(ctx, frame_worker_data->pbi);
  } else {
    VPxWorker *const worker = &ctx->frame_workers[ctx->next_submit_worker_id];
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    // Copy context from last worker thread to next worker thread.
    if (ctx->next_submit_worker_id != ctx->last_submit_worker_id)
      vp9_frameworker_copy_context(
          &ctx->frame_workers[ctx->next_submit_worker_id],
          &ctx->frame_workers[ctx->last_submit_worker_id]);

    frame_worker_data->pbi->ready_for_new_data = 0;
    // Copy the compressed data into worker's internal buffer.
    // TODO(hkuang): Would it be better for all workers to allocate a buffer
    // the size of the first intra frame? That would avoid repeated
    // allocation and deallocation.
    if (frame_worker_data->scratch_buffer_size < data_sz) {
      vpx_free(frame_worker_data->scratch_buffer);
      frame_worker_data->scratch_buffer = (uint8_t *)vpx_malloc(data_sz);
      if (frame_worker_data->scratch_buffer == NULL) {
        set_error_detail(ctx, "Failed to reallocate scratch buffer");
        return VPX_CODEC_MEM_ERROR;
      }
      frame_worker_data->scratch_buffer_size = data_sz;
    }
    frame_worker_data->data_size = data_sz;
    memcpy(frame_worker_data->scratch_buffer, *data, data_sz);

    frame_worker_data->frame_decoded = 0;
    frame_worker_data->frame_context_ready = 0;
    frame_worker_data->received_frame = 1;
    frame_worker_data->data = frame_worker_data->scratch_buffer;
    frame_worker_data->user_priv = user_priv;

    if (ctx->next_submit_worker_id != ctx->last_submit_worker_id)
      ctx->last_submit_worker_id =
          (ctx->last_submit_worker_id + 1) % ctx->num_frame_workers;

    ctx->next_submit_worker_id =
        (ctx->next_submit_worker_id + 1) % ctx->num_frame_workers;
    --ctx->available_threads;
    worker->had_error = 0;
    winterface->launch(worker);
  }

  return VPX_CODEC_OK;
}

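// Blocks until the next output worker finishes its frame and, if a showable
// frame is available, moves it into the frame cache (frame-parallel mode
// only).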
static void wait_worker_and_cache_frame(vpx_codec_alg_priv_t *ctx) {
  YV12_BUFFER_CONFIG sd;
  vp9_ppflags_t flags = { 0, 0, 0 };
  const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
  VPxWorker *const worker = &ctx->frame_workers[ctx->next_output_worker_id];
  FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
  ctx->next_output_worker_id =
      (ctx->next_output_worker_id + 1) % ctx->num_frame_workers;
  // TODO(hkuang): Add worker error handling here.
  winterface->sync(worker);
  frame_worker_data->received_frame = 0;
  ++ctx->available_threads;

  check_resync(ctx, frame_worker_data->pbi);

  if (vp9_get_raw_frame(frame_worker_data->pbi, &sd, &flags) == 0) {
    VP9_COMMON *const cm = &frame_worker_data->pbi->common;
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    ctx->frame_cache[ctx->frame_cache_write].fb_idx = cm->new_fb_idx;
    yuvconfig2image(&ctx->frame_cache[ctx->frame_cache_write].img, &sd,
                    frame_worker_data->user_priv);
    ctx->frame_cache[ctx->frame_cache_write].img.fb_priv =
        frame_bufs[cm->new_fb_idx].raw_frame_buffer.priv;
    ctx->frame_cache_write = (ctx->frame_cache_write + 1) % FRAME_CACHE_SIZE;
    ++ctx->num_cache_frames;
  }
}

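// Top-level decode entry point: handles the flush signal, lazily initializes
// the frame workers, parses the superframe index if present, and hands each
// frame to decode_one() in either serial or frame-parallel mode.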
static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
                                      const uint8_t *data, unsigned int data_sz,
                                      void *user_priv, long deadline) {
  const uint8_t *data_start = data;
  const uint8_t *const data_end = data + data_sz;
  vpx_codec_err_t res;
  uint32_t frame_sizes[8];
  int frame_count;

  if (data == NULL && data_sz == 0) {
    ctx->flushed = 1;
    return VPX_CODEC_OK;
  }

  // Reset flushed when receiving a valid frame.
  ctx->flushed = 0;

  // Initialize the decoder workers on the first frame.
  if (ctx->frame_workers == NULL) {
    const vpx_codec_err_t res = init_decoder(ctx);
    if (res != VPX_CODEC_OK) return res;
  }

  res = vp9_parse_superframe_index(data, data_sz, frame_sizes, &frame_count,
                                   ctx->decrypt_cb, ctx->decrypt_state);
  if (res != VPX_CODEC_OK) return res;

  if (ctx->svc_decoding && ctx->svc_spatial_layer < frame_count - 1)
    frame_count = ctx->svc_spatial_layer + 1;

  if (ctx->frame_parallel_decode) {
    // Decode in frame parallel mode. When decoding in this mode, the frame
    // passed to the decoder must be either a normal frame or a superframe with
    // superframe index so the decoder could get each frame's start position
    // in the superframe.
    if (frame_count > 0) {
      int i;

      for (i = 0; i < frame_count; ++i) {
        const uint8_t *data_start_copy = data_start;
        const uint32_t frame_size = frame_sizes[i];
        if (data_start < data ||
            frame_size > (uint32_t)(data_end - data_start)) {
          set_error_detail(ctx, "Invalid frame size in index");
          return VPX_CODEC_CORRUPT_FRAME;
        }

        if (ctx->available_threads == 0) {
          // No more threads for decoding. Wait until the next output worker
          // finishes decoding. Then copy the decoded frame into cache.
          if (ctx->num_cache_frames < FRAME_CACHE_SIZE) {
            wait_worker_and_cache_frame(ctx);
          } else {
            // TODO(hkuang): Add unit test to test this path.
            set_error_detail(ctx, "Frame output cache is full.");
            return VPX_CODEC_ERROR;
          }
        }

        res =
            decode_one(ctx, &data_start_copy, frame_size, user_priv, deadline);
        if (res != VPX_CODEC_OK) return res;
        data_start += frame_size;
      }
    } else {
      if (ctx->available_threads == 0) {
        // No more threads for decoding. Wait until the next output worker
        // finishes decoding. Then copy the decoded frame into cache.
        if (ctx->num_cache_frames < FRAME_CACHE_SIZE) {
          wait_worker_and_cache_frame(ctx);
        } else {
          // TODO(hkuang): Add unit test to test this path.
          set_error_detail(ctx, "Frame output cache is full.");
          return VPX_CODEC_ERROR;
        }
      }

      res = decode_one(ctx, &data, data_sz, user_priv, deadline);
      if (res != VPX_CODEC_OK) return res;
    }
  } else {
    // Decode in serial mode.
    if (frame_count > 0) {
      int i;

      for (i = 0; i < frame_count; ++i) {
        const uint8_t *data_start_copy = data_start;
        const uint32_t frame_size = frame_sizes[i];
        vpx_codec_err_t res;
        if (data_start < data ||
            frame_size > (uint32_t)(data_end - data_start)) {
          set_error_detail(ctx, "Invalid frame size in index");
          return VPX_CODEC_CORRUPT_FRAME;
        }

        res =
            decode_one(ctx, &data_start_copy, frame_size, user_priv, deadline);
        if (res != VPX_CODEC_OK) return res;

        data_start += frame_size;
      }
    } else {
      while (data_start < data_end) {
        const uint32_t frame_size = (uint32_t)(data_end - data_start);
        const vpx_codec_err_t res =
            decode_one(ctx, &data_start, frame_size, user_priv, deadline);
        if (res != VPX_CODEC_OK) return res;

        // Account for suboptimal termination by the encoder.
        while (data_start < data_end) {
          const uint8_t marker =
              read_marker(ctx->decrypt_cb, ctx->decrypt_state, data_start);
          if (marker) break;
          ++data_start;
        }
      }
    }
  }

  return res;
}

static void release_last_output_frame(vpx_codec_alg_priv_t *ctx) {
  RefCntBuffer *const frame_bufs = ctx->buffer_pool->frame_bufs;
  // Decrease reference count of last output frame in frame parallel mode.
  if (ctx->frame_parallel_decode && ctx->last_show_frame >= 0) {
    BufferPool *const pool = ctx->buffer_pool;
    lock_buffer_pool(pool);
    decrease_ref_count(ctx->last_show_frame, frame_bufs, pool);
    unlock_buffer_pool(pool);
  }
}

static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
                                      vpx_codec_iter_t *iter) {
  vpx_image_t *img = NULL;

  // In frame parallel decode, only return a frame when all the CPUs are busy
  // or the application has flushed the decoder.
  if (ctx->frame_parallel_decode && ctx->available_threads > 0 &&
      !ctx->flushed) {
    return NULL;
  }

  // Output the frames in the cache first.
  if (ctx->num_cache_frames > 0) {
    release_last_output_frame(ctx);
    ctx->last_show_frame = ctx->frame_cache[ctx->frame_cache_read].fb_idx;
    if (ctx->need_resync) return NULL;
    img = &ctx->frame_cache[ctx->frame_cache_read].img;
    ctx->frame_cache_read = (ctx->frame_cache_read + 1) % FRAME_CACHE_SIZE;
    --ctx->num_cache_frames;
    return img;
  }

  // iter acts as a flip flop, so an image is only returned on the first
  // call to get_frame.
  if (*iter == NULL && ctx->frame_workers != NULL) {
    do {
      YV12_BUFFER_CONFIG sd;
      vp9_ppflags_t flags = { 0, 0, 0 };
      const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
      VPxWorker *const worker = &ctx->frame_workers[ctx->next_output_worker_id];
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      ctx->next_output_worker_id =
          (ctx->next_output_worker_id + 1) % ctx->num_frame_workers;
      if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        set_ppflags(ctx, &flags);
      // Wait for the frame from worker thread.
      if (winterface->sync(worker)) {
        // Check if worker has received any frames.
        if (frame_worker_data->received_frame == 1) {
          ++ctx->available_threads;
          frame_worker_data->received_frame = 0;
          check_resync(ctx, frame_worker_data->pbi);
        }
        if (vp9_get_raw_frame(frame_worker_data->pbi, &sd, &flags) == 0) {
          VP9_COMMON *const cm = &frame_worker_data->pbi->common;
          RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
          release_last_output_frame(ctx);
          ctx->last_show_frame = frame_worker_data->pbi->common.new_fb_idx;
          if (ctx->need_resync) return NULL;
          yuvconfig2image(&ctx->img, &sd, frame_worker_data->user_priv);
          ctx->img.fb_priv = frame_bufs[cm->new_fb_idx].raw_frame_buffer.priv;
          img = &ctx->img;
          return img;
        }
      } else {
        // Decoding failed. Release the worker thread.
        frame_worker_data->received_frame = 0;
        ++ctx->available_threads;
        ctx->need_resync = 1;
        if (ctx->flushed != 1) return NULL;
      }
    } while (ctx->next_output_worker_id != ctx->next_submit_worker_id);
  }
  return NULL;
}

static vpx_codec_err_t decoder_set_fb_fn(
    vpx_codec_alg_priv_t *ctx, vpx_get_frame_buffer_cb_fn_t cb_get,
    vpx_release_frame_buffer_cb_fn_t cb_release, void *cb_priv) {
  if (cb_get == NULL || cb_release == NULL) {
    return VPX_CODEC_INVALID_PARAM;
  } else if (ctx->frame_workers == NULL) {
    // External frame buffer callbacks may only be set before the decoder has
    // been initialized (i.e., before any frame workers exist).
    ctx->get_ext_fb_cb = cb_get;
    ctx->release_ext_fb_cb = cb_release;
    ctx->ext_priv = cb_priv;
    return VPX_CODEC_OK;
  }

  return VPX_CODEC_ERROR;
}

static vpx_codec_err_t ctrl_set_reference(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_ref_frame_t *const data = va_arg(args, vpx_ref_frame_t *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (data) {
    vpx_ref_frame_t *const frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    image2yuvconfig(&frame->img, &sd);
    return vp9_set_reference_dec(&frame_worker_data->pbi->common,
                                 ref_frame_to_vp9_reframe(frame->frame_type),
                                 &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t ctrl_copy_reference(vpx_codec_alg_priv_t *ctx,
                                           va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    image2yuvconfig(&frame->img, &sd);
    return vp9_copy_reference_dec(frame_worker_data->pbi,
                                  (VP9_REFFRAME)frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t ctrl_get_reference(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vp9_ref_frame_t *data = va_arg(args, vp9_ref_frame_t *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (data) {
    YV12_BUFFER_CONFIG *fb;
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    fb = get_ref_frame(&frame_worker_data->pbi->common, data->idx);
    if (fb == NULL) return VPX_CODEC_ERROR;
    yuvconfig2image(&data->img, fb, NULL);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t ctrl_set_postproc(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
#if CONFIG_VP9_POSTPROC
  vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);

  if (data) {
    ctx->postproc_cfg_set = 1;
    ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
#else
  (void)ctx;
  (void)args;
  return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t ctrl_get_quantizer(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  int *const arg = va_arg(args, int *);
  if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
  *arg =
      ((FrameWorkerData *)ctx->frame_workers[0].data1)->pbi->common.base_qindex;
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                                 va_list args) {
  int *const update_info = va_arg(args, int *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (update_info) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      *update_info = frame_worker_data->pbi->refresh_frame_flags;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                                va_list args) {
  int *corrupted = va_arg(args, int *);

  if (corrupted) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      RefCntBuffer *const frame_bufs =
          frame_worker_data->pbi->common.buffer_pool->frame_bufs;
      if (frame_worker_data->pbi->common.frame_to_show == NULL)
        return VPX_CODEC_ERROR;
      if (ctx->last_show_frame >= 0)
        *corrupted = frame_bufs[ctx->last_show_frame].buf.corrupted;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_frame_size(vpx_codec_alg_priv_t *ctx,
                                           va_list args) {
  int *const frame_size = va_arg(args, int *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (frame_size) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      const VP9_COMMON *const cm = &frame_worker_data->pbi->common;
      frame_size[0] = cm->width;
      frame_size[1] = cm->height;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_render_size(vpx_codec_alg_priv_t *ctx,
                                            va_list args) {
  int *const render_size = va_arg(args, int *);

  // Only support this function in serial decode.
  if (ctx->frame_parallel_decode) {
    set_error_detail(ctx, "Not supported in frame parallel decode");
    return VPX_CODEC_INCAPABLE;
  }

  if (render_size) {
    if (ctx->frame_workers) {
      VPxWorker *const worker = ctx->frame_workers;
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      const VP9_COMMON *const cm = &frame_worker_data->pbi->common;
      render_size[0] = cm->render_width;
      render_size[1] = cm->render_height;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_get_bit_depth(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  unsigned int *const bit_depth = va_arg(args, unsigned int *);
  VPxWorker *const worker = &ctx->frame_workers[ctx->next_output_worker_id];

  if (bit_depth) {
    if (worker) {
      FrameWorkerData *const frame_worker_data =
          (FrameWorkerData *)worker->data1;
      const VP9_COMMON *const cm = &frame_worker_data->pbi->common;
      *bit_depth = cm->bit_depth;
      return VPX_CODEC_OK;
    } else {
      return VPX_CODEC_ERROR;
    }
  }

  return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_set_invert_tile_order(vpx_codec_alg_priv_t *ctx,
                                                  va_list args) {
  ctx->invert_tile_order = va_arg(args, int);
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_decryptor(vpx_codec_alg_priv_t *ctx,
                                          va_list args) {
  vpx_decrypt_init *init = va_arg(args, vpx_decrypt_init *);
  ctx->decrypt_cb = init ? init->decrypt_cb : NULL;
  ctx->decrypt_state = init ? init->decrypt_state : NULL;
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_byte_alignment(vpx_codec_alg_priv_t *ctx,
                                               va_list args) {
  const int legacy_byte_alignment = 0;
  const int min_byte_alignment = 32;
  const int max_byte_alignment = 1024;
  const int byte_alignment = va_arg(args, int);

  if (byte_alignment != legacy_byte_alignment &&
      (byte_alignment < min_byte_alignment ||
       byte_alignment > max_byte_alignment ||
       (byte_alignment & (byte_alignment - 1)) != 0))
    return VPX_CODEC_INVALID_PARAM;

  ctx->byte_alignment = byte_alignment;
  if (ctx->frame_workers) {
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->pbi->common.byte_alignment = byte_alignment;
  }
  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_skip_loop_filter(vpx_codec_alg_priv_t *ctx,
                                                 va_list args) {
  ctx->skip_loop_filter = va_arg(args, int);

  if (ctx->frame_workers) {
    VPxWorker *const worker = ctx->frame_workers;
    FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
    frame_worker_data->pbi->common.skip_loop_filter = ctx->skip_loop_filter;
  }

  return VPX_CODEC_OK;
}

static vpx_codec_err_t ctrl_set_spatial_layer_svc(vpx_codec_alg_priv_t *ctx,
                                                  va_list args) {
  ctx->svc_decoding = 1;
  ctx->svc_spatial_layer = va_arg(args, int);
  if (ctx->svc_spatial_layer < 0)
    return VPX_CODEC_INVALID_PARAM;
  else
    return VPX_CODEC_OK;
}

static vpx_codec_ctrl_fn_map_t decoder_ctrl_maps[] = {
  { VP8_COPY_REFERENCE, ctrl_copy_reference },

  // Setters
  { VP8_SET_REFERENCE, ctrl_set_reference },
  { VP8_SET_POSTPROC, ctrl_set_postproc },
  { VP9_INVERT_TILE_DECODE_ORDER, ctrl_set_invert_tile_order },
  { VPXD_SET_DECRYPTOR, ctrl_set_decryptor },
  { VP9_SET_BYTE_ALIGNMENT, ctrl_set_byte_alignment },
  { VP9_SET_SKIP_LOOP_FILTER, ctrl_set_skip_loop_filter },
  { VP9_DECODE_SVC_SPATIAL_LAYER, ctrl_set_spatial_layer_svc },

  // Getters
  { VPXD_GET_LAST_QUANTIZER, ctrl_get_quantizer },
  { VP8D_GET_LAST_REF_UPDATES, ctrl_get_last_ref_updates },
  { VP8D_GET_FRAME_CORRUPTED, ctrl_get_frame_corrupted },
  { VP9_GET_REFERENCE, ctrl_get_reference },
  { VP9D_GET_DISPLAY_SIZE, ctrl_get_render_size },
  { VP9D_GET_BIT_DEPTH, ctrl_get_bit_depth },
  { VP9D_GET_FRAME_SIZE, ctrl_get_frame_size },

  { -1, NULL },
};

#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp9_dx) = {
  "WebM Project VP9 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
#if CONFIG_VP9_HIGHBITDEPTH
  VPX_CODEC_CAP_HIGHBITDEPTH |
#endif
      VPX_CODEC_CAP_DECODER | VP9_CAP_POSTPROC |
      VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER,  // vpx_codec_caps_t
  decoder_init,                             // vpx_codec_init_fn_t
  decoder_destroy,                          // vpx_codec_destroy_fn_t
  decoder_ctrl_maps,                        // vpx_codec_ctrl_fn_map_t
  {
      // NOLINT
      decoder_peek_si,    // vpx_codec_peek_si_fn_t
      decoder_get_si,     // vpx_codec_get_si_fn_t
      decoder_decode,     // vpx_codec_decode_fn_t
      decoder_get_frame,  // vpx_codec_frame_get_fn_t
      decoder_set_fb_fn,  // vpx_codec_set_fb_fn_t
  },
  {
      // NOLINT
      0,
      NULL,  // vpx_codec_enc_cfg_map_t
      NULL,  // vpx_codec_encode_fn_t
      NULL,  // vpx_codec_get_cx_data_fn_t
      NULL,  // vpx_codec_enc_config_set_fn_t
      NULL,  // vpx_codec_get_global_headers_fn_t
      NULL,  // vpx_codec_get_preview_frame_fn_t
      NULL   // vpx_codec_enc_mr_get_mem_loc_fn_t
  }
};

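/* Usage sketch: a minimal decode loop driving this interface through the
 * public libvpx decoder API (vpx/vpx_decoder.h, vpx/vp8dx.h). Error handling
 * is mostly omitted, and frame_data/frame_size are assumed to come from the
 * application's demuxer.
 *
 *   #include "vpx/vp8dx.h"
 *   #include "vpx/vpx_decoder.h"
 *
 *   vpx_codec_ctx_t codec;
 *   // Bind this decoder interface; a NULL config selects the defaults
 *   // handled by decoder_init() above.
 *   if (vpx_codec_dec_init(&codec, vpx_codec_vp9_dx(), NULL, 0)) abort();
 *
 *   // For each compressed frame (or superframe) from the container:
 *   if (vpx_codec_decode(&codec, frame_data, frame_size, NULL, 0)) {
 *     // decoder_decode() above is the function reporting this error.
 *   }
 *   {
 *     vpx_codec_iter_t iter = NULL;
 *     vpx_image_t *img;
 *     // Backed by decoder_get_frame(); zero or more images may be returned
 *     // per decode call (e.g. for superframes).
 *     while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
 *       // Consume img->planes[] / img->stride[].
 *     }
 *   }
 *
 *   // Signal a flush by passing NULL data and zero size, drain any remaining
 *   // frames, then tear down (decoder_destroy() above):
 *   vpx_codec_destroy(&codec);
 */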