Thu, 15 Jan 2015 15:59:08 +0100
Implement a real Private Browsing Mode condition by changing the API/ABI;
This solves Tor bug #9701, complying with disk avoidance documented in
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
1 /*
2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "./vpx_version.h"
#include "vp9/decoder/vp9_onyxd.h"
#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/vp9_iface_common.h"
/* Advertise postproc capability only when built with VP9 postproc support. */
#define VP9_CAP_POSTPROC (CONFIG_VP9_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)

typedef vpx_codec_stream_info_t vp9_stream_info_t;

/* Structures for handling memory allocations */
typedef enum {
  VP9_SEG_ALG_PRIV = 256,
  VP9_SEG_MAX
} mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))

static unsigned long priv_sz(const vpx_codec_dec_cfg_t *si,
                             vpx_codec_flags_t flags);

/* Memory segments requested from the application (XMA mode); the list is
 * terminated by the VP9_SEG_MAX sentinel entry. */
static const mem_req_t vp9_mem_req_segs[] = {
  {VP9_SEG_ALG_PRIV, 0, 8, VPX_CODEC_MEM_ZERO, priv_sz},
  {VP9_SEG_MAX, 0, 0, 0, NULL}
};
/* Per-instance decoder state.  Lives in the VP9_SEG_ALG_PRIV memory segment
 * and is reached from the public context via ctx->priv->alg_priv. */
struct vpx_codec_alg_priv {
  vpx_codec_priv_t base;
  vpx_codec_mmap_t mmaps[NELEMENTS(vp9_mem_req_segs) - 1];
  vpx_codec_dec_cfg_t cfg;      /* internal copy of the caller's dec config */
  vp9_stream_info_t si;         /* stream info cached from peek_si */
  int defer_alloc;              /* nonzero until stream-size-dependent
                                 * segments are allocated in decode_one */
  int decoder_init;             /* decoder core created on first frame */
  VP9D_PTR pbi;                 /* opaque decoder instance */
  int postproc_cfg_set;         /* caller supplied postproc_cfg explicitly */
  vp8_postproc_cfg_t postproc_cfg;
#if CONFIG_POSTPROC_VISUALIZER
  unsigned int dbg_postproc_flag;
  int dbg_color_ref_frame_flag;
  int dbg_color_mb_modes_flag;
  int dbg_color_b_modes_flag;
  int dbg_display_mv_flag;
#endif
  vpx_image_t img;              /* wrapper over the last decoded frame */
  int img_setup;
  int img_avail;                /* a frame is ready for vp9_get_frame */
  int invert_tile_order;
};
64 static unsigned long priv_sz(const vpx_codec_dec_cfg_t *si,
65 vpx_codec_flags_t flags) {
66 /* Although this declaration is constant, we can't use it in the requested
67 * segments list because we want to define the requested segments list
68 * before defining the private type (so that the number of memory maps is
69 * known)
70 */
71 (void)si;
72 return sizeof(vpx_codec_alg_priv_t);
73 }
/* Bind a freshly allocated ALG_PRIV memory map to the public context and
 * initialize the private state inside it.  mmap->base must point to zeroed
 * storage of at least sizeof(vpx_codec_alg_priv_t) bytes. */
static void vp9_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap) {
  int i;

  /* The private struct sits at the start of the segment, so base serves as
   * both the generic priv pointer and the alg_priv pointer. */
  ctx->priv = mmap->base;
  ctx->priv->sz = sizeof(*ctx->priv);
  ctx->priv->iface = ctx->iface;
  ctx->priv->alg_priv = mmap->base;

  for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
    ctx->priv->alg_priv->mmaps[i].id = vp9_mem_req_segs[i].id;

  /* Record the ALG_PRIV map itself as segment 0. */
  ctx->priv->alg_priv->mmaps[0] = *mmap;
  ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
  ctx->priv->init_flags = ctx->init_flags;

  if (ctx->config.dec) {
    /* Update the reference to the config structure to an internal copy. */
    ctx->priv->alg_priv->cfg = *ctx->config.dec;
    ctx->config.dec = &ctx->priv->alg_priv->cfg;
  }
}
97 static void vp9_finalize_mmaps(vpx_codec_alg_priv_t *ctx) {
98 /* nothing to clean up */
99 }
/* vpx_codec_init_fn_t implementation.
 *
 * This function only allocates space for the vpx_codec_alg_priv_t
 * structure. More memory may be required at the time the stream
 * information becomes known (see the deferred allocation in decode_one).
 */
static vpx_codec_err_t vp9_init(vpx_codec_ctx_t *ctx,
                                vpx_codec_priv_enc_mr_cfg_t *data) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  if (!ctx->priv) {
    vpx_codec_mmap_t mmap;

    mmap.id = vp9_mem_req_segs[0].id;
    mmap.sz = sizeof(vpx_codec_alg_priv_t);
    mmap.align = vp9_mem_req_segs[0].align;
    mmap.flags = vp9_mem_req_segs[0].flags;

    res = vpx_mmap_alloc(&mmap);

    if (!res) {
      vp9_init_ctx(ctx, &mmap);

      /* Remaining allocations wait until the stream size is known. */
      ctx->priv->alg_priv->defer_alloc = 1;
      /*post processing level initialized to do nothing */
    }
  }

  return res;
}
130 static vpx_codec_err_t vp9_destroy(vpx_codec_alg_priv_t *ctx) {
131 int i;
133 vp9_remove_decompressor(ctx->pbi);
135 for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--) {
136 if (ctx->mmaps[i].dtor)
137 ctx->mmaps[i].dtor(&ctx->mmaps[i]);
138 }
140 return VPX_CODEC_OK;
141 }
/* Parse the uncompressed VP9 frame header to extract stream info (keyframe
 * flag and frame dimensions) without decoding any frame data. */
static vpx_codec_err_t vp9_peek_si(const uint8_t *data,
                                   unsigned int data_sz,
                                   vpx_codec_stream_info_t *si) {
  if (data_sz <= 8) return VPX_CODEC_UNSUP_BITSTREAM;
  /* Reject buffers whose end pointer would wrap around the address space;
   * decode_one relies on this check. */
  if (data + data_sz <= data) return VPX_CODEC_INVALID_PARAM;

  si->is_kf = 0;
  si->w = si->h = 0;

  {
    struct vp9_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
    const int frame_marker = vp9_rb_read_literal(&rb, 2);
    const int version = vp9_rb_read_bit(&rb) | (vp9_rb_read_bit(&rb) << 1);
    if (frame_marker != 0x2) return VPX_CODEC_UNSUP_BITSTREAM;
#if CONFIG_NON420
    if (version > 1) return VPX_CODEC_UNSUP_BITSTREAM;
#else
    if (version != 0) return VPX_CODEC_UNSUP_BITSTREAM;
#endif

    if (vp9_rb_read_bit(&rb)) {  // show an existing frame
      return VPX_CODEC_OK;
    }

    si->is_kf = !vp9_rb_read_bit(&rb);
    if (si->is_kf) {
      const int sRGB = 7;
      int colorspace;

      rb.bit_offset += 1;  // show frame
      rb.bit_offset += 1;  // error resilient

      /* Keyframes must start with the three-byte sync code. */
      if (vp9_rb_read_literal(&rb, 8) != VP9_SYNC_CODE_0 ||
          vp9_rb_read_literal(&rb, 8) != VP9_SYNC_CODE_1 ||
          vp9_rb_read_literal(&rb, 8) != VP9_SYNC_CODE_2) {
        return VPX_CODEC_UNSUP_BITSTREAM;
      }

      /* Skip colorspace-dependent header bits to reach the frame size. */
      colorspace = vp9_rb_read_literal(&rb, 3);
      if (colorspace != sRGB) {
        rb.bit_offset += 1;  // [16,235] (including xvycc) vs [0,255] range
        if (version == 1) {
          rb.bit_offset += 2;  // subsampling x/y
          rb.bit_offset += 1;  // has extra plane
        }
      } else {
        if (version == 1) {
          rb.bit_offset += 1;  // has extra plane
        } else {
          // RGB is only available in version 1
          return VPX_CODEC_UNSUP_BITSTREAM;
        }
      }

      // TODO(jzern): these are available on non-keyframes in intra only mode.
      si->w = vp9_rb_read_literal(&rb, 16) + 1;
      si->h = vp9_rb_read_literal(&rb, 16) + 1;
    }
  }

  return VPX_CODEC_OK;
}
206 static vpx_codec_err_t vp9_get_si(vpx_codec_alg_priv_t *ctx,
207 vpx_codec_stream_info_t *si) {
208 unsigned int sz;
210 if (si->sz >= sizeof(vp9_stream_info_t))
211 sz = sizeof(vp9_stream_info_t);
212 else
213 sz = sizeof(vpx_codec_stream_info_t);
215 memcpy(si, &ctx->si, sz);
216 si->sz = sz;
218 return VPX_CODEC_OK;
219 }
222 static vpx_codec_err_t
223 update_error_state(vpx_codec_alg_priv_t *ctx,
224 const struct vpx_internal_error_info *error) {
225 vpx_codec_err_t res;
227 if ((res = error->error_code))
228 ctx->base.err_detail = error->has_detail
229 ? error->detail
230 : NULL;
232 return res;
233 }
/* Decode a single compressed frame.
 *
 * On the first call this also determines the stream parameters via peek_si,
 * performs the deferred (stream-size-dependent) segment allocations, and
 * creates the decoder instance.  When a displayable frame results, ctx->img
 * is filled in and ctx->img_avail set for vp9_get_frame. */
static vpx_codec_err_t decode_one(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t **data,
                                  unsigned int data_sz,
                                  void *user_priv,
                                  long deadline) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  ctx->img_avail = 0;

  /* Determine the stream parameters. Note that we rely on peek_si to
   * validate that we have a buffer that does not wrap around the top
   * of the heap.
   */
  if (!ctx->si.h)
    res = ctx->base.iface->dec.peek_si(*data, data_sz, &ctx->si);

  /* Perform deferred allocations, if required */
  if (!res && ctx->defer_alloc) {
    int i;

    /* Segment 0 (ALG_PRIV) was allocated in vp9_init; size the remaining
     * segments from the now-known stream dimensions. */
    for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++) {
      vpx_codec_dec_cfg_t cfg;

      cfg.w = ctx->si.w;
      cfg.h = ctx->si.h;
      ctx->mmaps[i].id = vp9_mem_req_segs[i].id;
      ctx->mmaps[i].sz = vp9_mem_req_segs[i].sz;
      ctx->mmaps[i].align = vp9_mem_req_segs[i].align;
      ctx->mmaps[i].flags = vp9_mem_req_segs[i].flags;

      /* A zero static size means the size depends on the configuration. */
      if (!ctx->mmaps[i].sz)
        ctx->mmaps[i].sz = vp9_mem_req_segs[i].calc_sz(&cfg,
                                                       ctx->base.init_flags);

      res = vpx_mmap_alloc(&ctx->mmaps[i]);
    }

    if (!res)
      vp9_finalize_mmaps(ctx);

    ctx->defer_alloc = 0;
  }

  /* Initialize the decoder instance on the first frame*/
  if (!res && !ctx->decoder_init) {
    res = vpx_validate_mmaps(&ctx->si, ctx->mmaps,
                             vp9_mem_req_segs, NELEMENTS(vp9_mem_req_segs),
                             ctx->base.init_flags);

    if (!res) {
      VP9D_CONFIG oxcf;
      VP9D_PTR optr;

      vp9_initialize_dec();

      oxcf.width = ctx->si.w;
      oxcf.height = ctx->si.h;
      oxcf.version = 9;
      oxcf.postprocess = 0;
      oxcf.max_threads = ctx->cfg.threads;
      oxcf.inv_tile_order = ctx->invert_tile_order;
      optr = vp9_create_decompressor(&oxcf);

      /* If postprocessing was enabled by the application and a
       * configuration has not been provided, default it.
       */
      if (!ctx->postproc_cfg_set
          && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) {
        ctx->postproc_cfg.post_proc_flag =
          VP8_DEBLOCK | VP8_DEMACROBLOCK;
        ctx->postproc_cfg.deblocking_level = 4;
        ctx->postproc_cfg.noise_level = 0;
      }

      if (!optr)
        res = VPX_CODEC_ERROR;
      else
        ctx->pbi = optr;
    }

    /* Marked even on failure so the init sequence is not retried. */
    ctx->decoder_init = 1;
  }

  if (!res && ctx->pbi) {
    YV12_BUFFER_CONFIG sd;
    int64_t time_stamp = 0, time_end_stamp = 0;
    vp9_ppflags_t flags = {0};

    /* Assemble postproc flags; visualizer debug bits are OR'ed in when
     * compiled with CONFIG_POSTPROC_VISUALIZER. */
    if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) {
      flags.post_proc_flag =
#if CONFIG_POSTPROC_VISUALIZER
        ((ctx->dbg_color_ref_frame_flag != 0) ?
         VP9D_DEBUG_CLR_FRM_REF_BLKS : 0)
        | ((ctx->dbg_color_mb_modes_flag != 0) ?
           VP9D_DEBUG_CLR_BLK_MODES : 0)
        | ((ctx->dbg_color_b_modes_flag != 0) ?
           VP9D_DEBUG_CLR_BLK_MODES : 0)
        | ((ctx->dbg_display_mv_flag != 0) ?
           VP9D_DEBUG_DRAW_MV : 0)
        |
#endif
        ctx->postproc_cfg.post_proc_flag;

      flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
      flags.noise_level = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
      flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
      flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag;
      flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag;
      flags.display_mv_flag = ctx->dbg_display_mv_flag;
#endif
    }

    if (vp9_receive_compressed_data(ctx->pbi, data_sz, data, deadline)) {
      VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
      res = update_error_state(ctx, &pbi->common.error);
    }

    if (!res && 0 == vp9_get_raw_frame(ctx->pbi, &sd, &time_stamp,
                                       &time_end_stamp, &flags)) {
      yuvconfig2image(&ctx->img, &sd, user_priv);
      ctx->img_avail = 1;
    }
  }

  return res;
}
/* Parse the superframe index, if present, from the end of a data buffer.
 *
 * A superframe packs several compressed frames into one buffer, followed by
 * an index: a marker byte (top three bits 110), per-frame sizes of `mag`
 * bytes each (little-endian), and the same marker byte repeated.  On return
 * *count is the number of frames (0 if no valid index is found) and
 * sizes[0..count-1] hold each frame's byte size.  sizes must have room for
 * 8 entries, the maximum the 3-bit frame count can encode.
 */
static void parse_superframe_index(const uint8_t *data,
                                   size_t data_sz,
                                   uint32_t sizes[8],
                                   int *count) {
  uint8_t marker;

  assert(data_sz);
  marker = data[data_sz - 1];
  *count = 0;

  if ((marker & 0xe0) == 0xc0) {
    const uint32_t frames = (marker & 0x7) + 1;
    const uint32_t mag = ((marker >> 3) & 0x3) + 1;
    const size_t index_sz = 2 + mag * frames;

    /* The index is only valid if the marker byte also appears immediately
     * before it. */
    if (data_sz >= index_sz && data[data_sz - index_sz] == marker) {
      // found a valid superframe index
      uint32_t i, j;
      const uint8_t *x = data + data_sz - index_sz + 1;

      for (i = 0; i < frames; i++) {
        uint32_t this_sz = 0;

        /* Cast to uint32_t before shifting: a plain byte promotes to signed
         * int, and shifting e.g. 0xff left by 24 would overflow it (UB). */
        for (j = 0; j < mag; j++)
          this_sz |= (uint32_t)(*x++) << (j * 8);
        sizes[i] = this_sz;
      }

      *count = frames;
    }
  }
}
/* Top-level decode entry point (vpx_codec_decode_fn_t).  A buffer may be a
 * "superframe": several compressed frames sharing one timestamp followed by
 * an index.  Each frame is handed to decode_one in turn. */
static vpx_codec_err_t vp9_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data,
                                  unsigned int data_sz,
                                  void *user_priv,
                                  long deadline) {
  const uint8_t *data_start = data;
  const uint8_t *data_end = data + data_sz;
  vpx_codec_err_t res = 0;  /* 0 == VPX_CODEC_OK */
  uint32_t sizes[8];
  int frames_this_pts, frame_count = 0;

  if (data == NULL || data_sz == 0) return VPX_CODEC_INVALID_PARAM;

  parse_superframe_index(data, data_sz, sizes, &frames_this_pts);

  do {
    // Skip over the superframe index, if present
    if (data_sz && (*data_start & 0xe0) == 0xc0) {
      const uint8_t marker = *data_start;
      const uint32_t frames = (marker & 0x7) + 1;
      const uint32_t mag = ((marker >> 3) & 0x3) + 1;
      const uint32_t index_sz = 2 + mag * frames;

      if (data_sz >= index_sz && data_start[index_sz - 1] == marker) {
        data_start += index_sz;
        data_sz -= index_sz;
        if (data_start < data_end)
          continue;
        else
          break;
      }
    }

    // Use the correct size for this frame, if an index is present.
    if (frames_this_pts) {
      /* NOTE(review): frame_count is never checked against frames_this_pts
       * (max 8); a malformed index whose sizes undercount the buffer could
       * index past sizes[7] — verify against upstream. */
      uint32_t this_sz = sizes[frame_count];

      if (data_sz < this_sz) {
        ctx->base.err_detail = "Invalid frame size in index";
        return VPX_CODEC_CORRUPT_FRAME;
      }

      data_sz = this_sz;
      frame_count++;
    }

    res = decode_one(ctx, &data_start, data_sz, user_priv, deadline);
    assert(data_start >= data);
    assert(data_start <= data_end);

    /* Early exit if there was a decode error */
    if (res)
      break;

    /* Account for suboptimal termination by the encoder. */
    while (data_start < data_end && *data_start == 0)
      data_start++;

    data_sz = data_end - data_start;
  } while (data_start < data_end);
  return res;
}
460 static vpx_image_t *vp9_get_frame(vpx_codec_alg_priv_t *ctx,
461 vpx_codec_iter_t *iter) {
462 vpx_image_t *img = NULL;
464 if (ctx->img_avail) {
465 /* iter acts as a flip flop, so an image is only returned on the first
466 * call to get_frame.
467 */
468 if (!(*iter)) {
469 img = &ctx->img;
470 *iter = img;
471 }
472 }
473 ctx->img_avail = 0;
475 return img;
476 }
/* vpx_codec_get_mmap_fn_t implementation: iterate over the memory segments
 * the decoder asks the application to allocate (XMA mode).  *iter tracks
 * the position within vp9_mem_req_segs; VPX_CODEC_LIST_END is returned
 * once the VP9_SEG_MAX sentinel is reached. */
static vpx_codec_err_t vp9_xma_get_mmap(const vpx_codec_ctx_t *ctx,
                                        vpx_codec_mmap_t *mmap,
                                        vpx_codec_iter_t *iter) {
  vpx_codec_err_t res;
  const mem_req_t *seg_iter = *iter;

  /* Get address of next segment request */
  do {
    if (!seg_iter)
      seg_iter = vp9_mem_req_segs;
    else if (seg_iter->id != VP9_SEG_MAX)
      seg_iter++;

    *iter = (vpx_codec_iter_t)seg_iter;

    if (seg_iter->id != VP9_SEG_MAX) {
      mmap->id = seg_iter->id;
      mmap->sz = seg_iter->sz;
      mmap->align = seg_iter->align;
      mmap->flags = seg_iter->flags;

      /* A zero static size means the size is computed from the config. */
      if (!seg_iter->sz)
        mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

      res = VPX_CODEC_OK;
    } else {
      res = VPX_CODEC_LIST_END;
    }
  } while (!mmap->sz && res != VPX_CODEC_LIST_END);  /* skip empty segments */

  return res;
}
/* vpx_codec_set_mmap_fn_t implementation: accept an application-allocated
 * segment.  The ALG_PRIV segment creates ctx->priv via vp9_init_ctx; once
 * every segment has a base address, the decoder is initialized.  Since VP9
 * requests only the ALG_PRIV segment (see vp9_mem_req_segs), installing it
 * completes the set in a single call. */
static vpx_codec_err_t vp9_xma_set_mmap(vpx_codec_ctx_t *ctx,
                                        const vpx_codec_mmap_t *mmap) {
  vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
  int i, done;

  if (!ctx->priv) {
    if (mmap->id == VP9_SEG_ALG_PRIV) {
      if (!ctx->priv) {
        vp9_init_ctx(ctx, mmap);
        res = VPX_CODEC_OK;
      }
    }
  }

  done = 1;

  /* Record the segment in the private mmap table; res was cleared above
   * only if this call installed ALG_PRIV. */
  if (!res && ctx->priv->alg_priv) {
    for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++) {
      if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
        if (!ctx->priv->alg_priv->mmaps[i].base) {
          ctx->priv->alg_priv->mmaps[i] = *mmap;
          res = VPX_CODEC_OK;
        }

      done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
    }
  }

  /* All segments populated: finish setup and run the normal init path. */
  if (done && !res) {
    vp9_finalize_mmaps(ctx->priv->alg_priv);
    res = ctx->iface->init(ctx, NULL);
  }

  return res;
}
547 static vpx_codec_err_t set_reference(vpx_codec_alg_priv_t *ctx,
548 int ctr_id,
549 va_list args) {
550 vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
552 if (data) {
553 vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
554 YV12_BUFFER_CONFIG sd;
556 image2yuvconfig(&frame->img, &sd);
558 return vp9_set_reference_dec(ctx->pbi,
559 (VP9_REFFRAME)frame->frame_type, &sd);
560 } else {
561 return VPX_CODEC_INVALID_PARAM;
562 }
563 }
565 static vpx_codec_err_t copy_reference(vpx_codec_alg_priv_t *ctx,
566 int ctr_id,
567 va_list args) {
568 vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
570 if (data) {
571 vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
572 YV12_BUFFER_CONFIG sd;
574 image2yuvconfig(&frame->img, &sd);
576 return vp9_copy_reference_dec(ctx->pbi,
577 (VP9_REFFRAME)frame->frame_type, &sd);
578 } else {
579 return VPX_CODEC_INVALID_PARAM;
580 }
581 }
583 static vpx_codec_err_t get_reference(vpx_codec_alg_priv_t *ctx,
584 int ctr_id,
585 va_list args) {
586 vp9_ref_frame_t *data = va_arg(args, vp9_ref_frame_t *);
588 if (data) {
589 YV12_BUFFER_CONFIG* fb;
591 vp9_get_reference_dec(ctx->pbi, data->idx, &fb);
592 yuvconfig2image(&data->img, fb, NULL);
593 return VPX_CODEC_OK;
594 } else {
595 return VPX_CODEC_INVALID_PARAM;
596 }
597 }
599 static vpx_codec_err_t set_postproc(vpx_codec_alg_priv_t *ctx,
600 int ctr_id,
601 va_list args) {
602 #if CONFIG_VP9_POSTPROC
603 vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
605 if (data) {
606 ctx->postproc_cfg_set = 1;
607 ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
608 return VPX_CODEC_OK;
609 } else {
610 return VPX_CODEC_INVALID_PARAM;
611 }
612 #else
613 return VPX_CODEC_INCAPABLE;
614 #endif
615 }
617 static vpx_codec_err_t set_dbg_options(vpx_codec_alg_priv_t *ctx,
618 int ctrl_id,
619 va_list args) {
620 #if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
621 int data = va_arg(args, int);
623 #define MAP(id, var) case id: var = data; break;
625 switch (ctrl_id) {
626 MAP(VP8_SET_DBG_COLOR_REF_FRAME, ctx->dbg_color_ref_frame_flag);
627 MAP(VP8_SET_DBG_COLOR_MB_MODES, ctx->dbg_color_mb_modes_flag);
628 MAP(VP8_SET_DBG_COLOR_B_MODES, ctx->dbg_color_b_modes_flag);
629 MAP(VP8_SET_DBG_DISPLAY_MV, ctx->dbg_display_mv_flag);
630 }
632 return VPX_CODEC_OK;
633 #else
634 return VPX_CODEC_INCAPABLE;
635 #endif
636 }
638 static vpx_codec_err_t get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
639 int ctrl_id,
640 va_list args) {
641 int *update_info = va_arg(args, int *);
642 VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
644 if (update_info) {
645 *update_info = pbi->refresh_frame_flags;
647 return VPX_CODEC_OK;
648 } else {
649 return VPX_CODEC_INVALID_PARAM;
650 }
651 }
654 static vpx_codec_err_t get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
655 int ctrl_id,
656 va_list args) {
657 int *corrupted = va_arg(args, int *);
659 if (corrupted) {
660 VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
661 if (pbi)
662 *corrupted = pbi->common.frame_to_show->corrupted;
663 else
664 return VPX_CODEC_ERROR;
665 return VPX_CODEC_OK;
666 } else {
667 return VPX_CODEC_INVALID_PARAM;
668 }
669 }
671 static vpx_codec_err_t set_invert_tile_order(vpx_codec_alg_priv_t *ctx,
672 int ctr_id,
673 va_list args) {
674 ctx->invert_tile_order = va_arg(args, int);
675 return VPX_CODEC_OK;
676 }
/* Control ID -> handler dispatch table, terminated by the {-1, NULL}
 * sentinel.  VP8-prefixed IDs are shared with the VP8 decoder interface. */
static vpx_codec_ctrl_fn_map_t ctf_maps[] = {
  {VP8_SET_REFERENCE, set_reference},
  {VP8_COPY_REFERENCE, copy_reference},
  {VP8_SET_POSTPROC, set_postproc},
  {VP8_SET_DBG_COLOR_REF_FRAME, set_dbg_options},
  {VP8_SET_DBG_COLOR_MB_MODES, set_dbg_options},
  {VP8_SET_DBG_COLOR_B_MODES, set_dbg_options},
  {VP8_SET_DBG_DISPLAY_MV, set_dbg_options},
  {VP8D_GET_LAST_REF_UPDATES, get_last_ref_updates},
  {VP8D_GET_FRAME_CORRUPTED, get_frame_corrupted},
  {VP9_GET_REFERENCE, get_reference},
  {VP9_INVERT_TILE_DECODE_ORDER, set_invert_tile_order},
  { -1, NULL},
};
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
/* Public decoder interface descriptor consumed by vpx_codec_dec_init();
 * encoder entry points are stubbed with NOT_IMPLEMENTED. */
CODEC_INTERFACE(vpx_codec_vp9_dx) = {
  "WebM Project VP9 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_DECODER | VP9_CAP_POSTPROC,
  /* vpx_codec_caps_t caps; */
  vp9_init, /* vpx_codec_init_fn_t init; */
  vp9_destroy, /* vpx_codec_destroy_fn_t destroy; */
  ctf_maps, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
  vp9_xma_get_mmap, /* vpx_codec_get_mmap_fn_t get_mmap; */
  vp9_xma_set_mmap, /* vpx_codec_set_mmap_fn_t set_mmap; */
  { // NOLINT
    vp9_peek_si, /* vpx_codec_peek_si_fn_t peek_si; */
    vp9_get_si, /* vpx_codec_get_si_fn_t get_si; */
    vp9_decode, /* vpx_codec_decode_fn_t decode; */
    vp9_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
  },
  { // NOLINT
    /* encoder functions */
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED,
    NOT_IMPLEMENTED
  }
}; 