Thu, 15 Jan 2015 15:59:08 +0100
Implement a real Private Browsing Mode condition by changing the API/ABI;
This solves Tor bug #9701, complying with disk avoidance documented in
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
michael@0 | 1 | /* |
michael@0 | 2 | * Copyright (c) 2010 The WebM project authors. All Rights Reserved. |
michael@0 | 3 | * |
michael@0 | 4 | * Use of this source code is governed by a BSD-style license |
michael@0 | 5 | * that can be found in the LICENSE file in the root of the source |
michael@0 | 6 | * tree. An additional intellectual property rights grant can be found |
michael@0 | 7 | * in the file PATENTS. All contributing project authors may |
michael@0 | 8 | * be found in the AUTHORS file in the root of the source tree. |
michael@0 | 9 | */ |
michael@0 | 10 | |
michael@0 | 11 | |
michael@0 | 12 | #include <stdlib.h> |
michael@0 | 13 | #include <string.h> |
michael@0 | 14 | #include "vp8_rtcd.h" |
michael@0 | 15 | #include "vpx/vpx_decoder.h" |
michael@0 | 16 | #include "vpx/vp8dx.h" |
michael@0 | 17 | #include "vpx/internal/vpx_codec_internal.h" |
michael@0 | 18 | #include "vpx_version.h" |
michael@0 | 19 | #include "common/onyxd.h" |
michael@0 | 20 | #include "decoder/onyxd_int.h" |
michael@0 | 21 | #include "common/alloccommon.h" |
michael@0 | 22 | #include "vpx_mem/vpx_mem.h" |
michael@0 | 23 | #if CONFIG_ERROR_CONCEALMENT |
michael@0 | 24 | #include "decoder/error_concealment.h" |
michael@0 | 25 | #endif |
michael@0 | 26 | #include "decoder/decoderthreading.h" |
michael@0 | 27 | |
michael@0 | 28 | #define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0) |
michael@0 | 29 | #define VP8_CAP_ERROR_CONCEALMENT (CONFIG_ERROR_CONCEALMENT ? \ |
michael@0 | 30 | VPX_CODEC_CAP_ERROR_CONCEALMENT : 0) |
michael@0 | 31 | |
/* VP8 adds no fields beyond the generic stream info structure. */
typedef vpx_codec_stream_info_t vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum
{
    VP8_SEG_ALG_PRIV = 256, /* segment backing vpx_codec_alg_priv_t */
    VP8_SEG_MAX             /* list terminator */
} mem_seg_id_t;
/* Element count of a true array (not valid on pointer parameters). */
#define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))

/* Forward declaration: the size calculator returns sizeof the private
 * type, which is defined after the segment table below. */
static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t);

/* Memory segments requested from the application (XMA mode); the
 * VP8_SEG_MAX entry terminates the list. Fields: id, static size
 * (0 = computed via calc_sz), alignment, flags, size calculator. */
static const mem_req_t vp8_mem_req_segs[] =
{
    {VP8_SEG_ALG_PRIV, 0, 8, VPX_CODEC_MEM_ZERO, vp8_priv_sz},
    {VP8_SEG_MAX, 0, 0, 0, NULL}
};
michael@0 | 49 | |
/* Per-instance decoder state, carved out of the ALG_PRIV memory segment.
 * vp8_init_ctx() points both ctx->priv and ctx->priv->alg_priv at the
 * same base address, so `base` must remain the first member. */
struct vpx_codec_alg_priv
{
    vpx_codec_priv_t base;       /* generic codec-private header; must be first */
    vpx_codec_mmap_t mmaps[NELEMENTS(vp8_mem_req_segs)-1]; /* owned memory segments */
    vpx_codec_dec_cfg_t cfg;     /* internal copy of the caller's decoder config */
    vp8_stream_info_t si;        /* last parsed stream info (w/h, keyframe flag) */
    int defer_alloc;             /* nonzero until stream-sized segments are allocated */
    int decoder_init;            /* nonzero once the decoder core exists */
    int postproc_cfg_set;        /* app provided postproc_cfg explicitly */
    vp8_postproc_cfg_t postproc_cfg;
#if CONFIG_POSTPROC_VISUALIZER
    unsigned int dbg_postproc_flag;
    int dbg_color_ref_frame_flag;
    int dbg_color_mb_modes_flag;
    int dbg_color_b_modes_flag;
    int dbg_display_mv_flag;
#endif
    vp8_decrypt_cb *decrypt_cb;  /* optional bitstream decryption hook */
    void *decrypt_state;         /* opaque state handed to decrypt_cb */
    vpx_image_t img;             /* wrapper describing the last returned frame */
    int img_setup;
    struct frame_buffers yv12_frame_buffers; /* decoder core instance(s) */
    void *user_priv;             /* per-frame user data echoed back in img */
    FRAGMENT_DATA fragments;     /* accumulated input fragments for one frame */
};
michael@0 | 75 | |
michael@0 | 76 | static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t flags) |
michael@0 | 77 | { |
michael@0 | 78 | /* Although this declaration is constant, we can't use it in the requested |
michael@0 | 79 | * segments list because we want to define the requested segments list |
michael@0 | 80 | * before defining the private type (so that the number of memory maps is |
michael@0 | 81 | * known) |
michael@0 | 82 | */ |
michael@0 | 83 | (void)si; |
michael@0 | 84 | return sizeof(vpx_codec_alg_priv_t); |
michael@0 | 85 | } |
michael@0 | 86 | |
michael@0 | 87 | static void vp8_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap) |
michael@0 | 88 | { |
michael@0 | 89 | int i; |
michael@0 | 90 | |
michael@0 | 91 | ctx->priv = mmap->base; |
michael@0 | 92 | ctx->priv->sz = sizeof(*ctx->priv); |
michael@0 | 93 | ctx->priv->iface = ctx->iface; |
michael@0 | 94 | ctx->priv->alg_priv = mmap->base; |
michael@0 | 95 | |
michael@0 | 96 | for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++) |
michael@0 | 97 | ctx->priv->alg_priv->mmaps[i].id = vp8_mem_req_segs[i].id; |
michael@0 | 98 | |
michael@0 | 99 | ctx->priv->alg_priv->mmaps[0] = *mmap; |
michael@0 | 100 | ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si); |
michael@0 | 101 | ctx->priv->alg_priv->decrypt_cb = NULL; |
michael@0 | 102 | ctx->priv->alg_priv->decrypt_state = NULL; |
michael@0 | 103 | ctx->priv->init_flags = ctx->init_flags; |
michael@0 | 104 | |
michael@0 | 105 | if (ctx->config.dec) |
michael@0 | 106 | { |
michael@0 | 107 | /* Update the reference to the config structure to an internal copy. */ |
michael@0 | 108 | ctx->priv->alg_priv->cfg = *ctx->config.dec; |
michael@0 | 109 | ctx->config.dec = &ctx->priv->alg_priv->cfg; |
michael@0 | 110 | } |
michael@0 | 111 | } |
michael@0 | 112 | |
michael@0 | 113 | static void vp8_finalize_mmaps(vpx_codec_alg_priv_t *ctx) |
michael@0 | 114 | { |
michael@0 | 115 | /* nothing to clean up */ |
michael@0 | 116 | } |
michael@0 | 117 | |
/* Codec-interface init entry point.
 *
 * Allocates the algorithm-private structure on first use (further,
 * stream-sized allocations are deferred until the resolution is known),
 * then validates the threading-related init flags.
 *
 * \param[in,out] ctx   Codec context; ctx->priv is created if absent.
 * \param[in]     data  Multi-resolution encoder config — unused here.
 * \return VPX_CODEC_OK, an allocation error, or
 *         VPX_CODEC_INVALID_PARAM for unsupported flag combinations.
 */
static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx,
                                vpx_codec_priv_enc_mr_cfg_t *data)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    (void) data;

    /* Install run-time selected (CPU-specific) function pointers. */
    vp8_rtcd();

    /* This function only allocates space for the vpx_codec_alg_priv_t
     * structure. More memory may be required at the time the stream
     * information becomes known.
     */
    if (!ctx->priv)
    {
        vpx_codec_mmap_t mmap;

        mmap.id = vp8_mem_req_segs[0].id;
        mmap.sz = sizeof(vpx_codec_alg_priv_t);
        mmap.align = vp8_mem_req_segs[0].align;
        mmap.flags = vp8_mem_req_segs[0].flags;

        res = vpx_mmap_alloc(&mmap);
        if (res != VPX_CODEC_OK) return res;

        vp8_init_ctx(ctx, &mmap);

        /* initialize number of fragments to zero */
        ctx->priv->alg_priv->fragments.count = 0;
        /* is input fragments enabled? */
        ctx->priv->alg_priv->fragments.enabled =
                (ctx->priv->alg_priv->base.init_flags &
                    VPX_CODEC_USE_INPUT_FRAGMENTS);

        ctx->priv->alg_priv->defer_alloc = 1;
        /*post processing level initialized to do nothing */
    }

    ctx->priv->alg_priv->yv12_frame_buffers.use_frame_threads =
            (ctx->priv->alg_priv->base.init_flags &
                    VPX_CODEC_USE_FRAME_THREADING);

    /* for now, disable frame threading */
    ctx->priv->alg_priv->yv12_frame_buffers.use_frame_threads = 0;

    /* NOTE: with use_frame_threads forced to 0 above, this check is
     * currently unreachable; it guards the combination once frame
     * threading is re-enabled. */
    if(ctx->priv->alg_priv->yv12_frame_buffers.use_frame_threads &&
            (( ctx->priv->alg_priv->base.init_flags &
                            VPX_CODEC_USE_ERROR_CONCEALMENT)
                || ( ctx->priv->alg_priv->base.init_flags &
                        VPX_CODEC_USE_INPUT_FRAGMENTS) ) )
    {
        /* row-based threading, error concealment, and input fragments will
         * not be supported when using frame-based threading */
        res = VPX_CODEC_INVALID_PARAM;
    }

    return res;
}
michael@0 | 175 | |
michael@0 | 176 | static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx) |
michael@0 | 177 | { |
michael@0 | 178 | int i; |
michael@0 | 179 | |
michael@0 | 180 | vp8_remove_decoder_instances(&ctx->yv12_frame_buffers); |
michael@0 | 181 | |
michael@0 | 182 | for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--) |
michael@0 | 183 | { |
michael@0 | 184 | if (ctx->mmaps[i].dtor) |
michael@0 | 185 | ctx->mmaps[i].dtor(&ctx->mmaps[i]); |
michael@0 | 186 | } |
michael@0 | 187 | |
michael@0 | 188 | return VPX_CODEC_OK; |
michael@0 | 189 | } |
michael@0 | 190 | |
/* Parse the uncompressed part of a (possibly encrypted) frame header and
 * fill in stream info: keyframe flag and, for keyframes, dimensions.
 *
 * \param[in]  data           Frame buffer (at least the first bytes).
 * \param[in]  data_sz        Bytes available at data.
 * \param[out] si             Receives is_kf and, on keyframes, w/h.
 * \param[in]  decrypt_cb     Optional decryptor for the header bytes.
 * \param[in]  decrypt_state  Opaque state for decrypt_cb.
 * \return VPX_CODEC_OK; VPX_CODEC_INVALID_PARAM on a degenerate buffer;
 *         VPX_CODEC_UNSUP_BITSTREAM for non-keyframes or bad sync/size.
 */
static vpx_codec_err_t vp8_peek_si_internal(const uint8_t *data,
                                            unsigned int data_sz,
                                            vpx_codec_stream_info_t *si,
                                            vp8_decrypt_cb *decrypt_cb,
                                            void *decrypt_state)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    /* Reject empty buffers and buffers that wrap around the top of the
     * address space; vp8_decode() relies on this validation. */
    if(data + data_sz <= data)
    {
        res = VPX_CODEC_INVALID_PARAM;
    }
    else
    {
        /* Parse uncompresssed part of key frame header.
         * 3 bytes:- including version, frame type and an offset
         * 3 bytes:- sync code (0x9d, 0x01, 0x2a)
         * 4 bytes:- including image width and height in the lowest 14 bits
         *           of each 2-byte value.
         */
        uint8_t clear_buffer[10];
        const uint8_t *clear = data;
        if (decrypt_cb)
        {
            /* Decrypt only the header bytes we inspect (at most 10). */
            int n = data_sz > 10 ? 10 : data_sz;
            decrypt_cb(decrypt_state, data, clear_buffer, n);
            clear = clear_buffer;
        }
        si->is_kf = 0;

        /* Low bit of the first byte clear => keyframe. */
        if (data_sz >= 10 && !(clear[0] & 0x01))  /* I-Frame */
        {
            si->is_kf = 1;

            /* vet via sync code */
            if (clear[3] != 0x9d || clear[4] != 0x01 || clear[5] != 0x2a)
                res = VPX_CODEC_UNSUP_BITSTREAM;

            /* 14-bit little-endian width and height. */
            si->w = (clear[6] | (clear[7] << 8)) & 0x3fff;
            si->h = (clear[8] | (clear[9] << 8)) & 0x3fff;

            /*printf("w=%d, h=%d\n", si->w, si->h);*/
            if (!(si->h | si->w))
                res = VPX_CODEC_UNSUP_BITSTREAM;
        }
        else
        {
            /* Non-keyframes carry no dimensions; the caller treats this
             * as non-fatal once the decoder is initialized. */
            res = VPX_CODEC_UNSUP_BITSTREAM;
        }
    }

    return res;
}
michael@0 | 244 | |
michael@0 | 245 | static vpx_codec_err_t vp8_peek_si(const uint8_t *data, |
michael@0 | 246 | unsigned int data_sz, |
michael@0 | 247 | vpx_codec_stream_info_t *si) { |
michael@0 | 248 | return vp8_peek_si_internal(data, data_sz, si, NULL, NULL); |
michael@0 | 249 | } |
michael@0 | 250 | |
michael@0 | 251 | static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t *ctx, |
michael@0 | 252 | vpx_codec_stream_info_t *si) |
michael@0 | 253 | { |
michael@0 | 254 | |
michael@0 | 255 | unsigned int sz; |
michael@0 | 256 | |
michael@0 | 257 | if (si->sz >= sizeof(vp8_stream_info_t)) |
michael@0 | 258 | sz = sizeof(vp8_stream_info_t); |
michael@0 | 259 | else |
michael@0 | 260 | sz = sizeof(vpx_codec_stream_info_t); |
michael@0 | 261 | |
michael@0 | 262 | memcpy(si, &ctx->si, sz); |
michael@0 | 263 | si->sz = sz; |
michael@0 | 264 | |
michael@0 | 265 | return VPX_CODEC_OK; |
michael@0 | 266 | } |
michael@0 | 267 | |
michael@0 | 268 | |
michael@0 | 269 | static vpx_codec_err_t |
michael@0 | 270 | update_error_state(vpx_codec_alg_priv_t *ctx, |
michael@0 | 271 | const struct vpx_internal_error_info *error) |
michael@0 | 272 | { |
michael@0 | 273 | vpx_codec_err_t res; |
michael@0 | 274 | |
michael@0 | 275 | if ((res = error->error_code)) |
michael@0 | 276 | ctx->base.err_detail = error->has_detail |
michael@0 | 277 | ? error->detail |
michael@0 | 278 | : NULL; |
michael@0 | 279 | |
michael@0 | 280 | return res; |
michael@0 | 281 | } |
michael@0 | 282 | |
michael@0 | 283 | static void yuvconfig2image(vpx_image_t *img, |
michael@0 | 284 | const YV12_BUFFER_CONFIG *yv12, |
michael@0 | 285 | void *user_priv) |
michael@0 | 286 | { |
michael@0 | 287 | /** vpx_img_wrap() doesn't allow specifying independent strides for |
michael@0 | 288 | * the Y, U, and V planes, nor other alignment adjustments that |
michael@0 | 289 | * might be representable by a YV12_BUFFER_CONFIG, so we just |
michael@0 | 290 | * initialize all the fields.*/ |
michael@0 | 291 | img->fmt = VPX_IMG_FMT_I420; |
michael@0 | 292 | img->w = yv12->y_stride; |
michael@0 | 293 | img->h = (yv12->y_height + 2 * VP8BORDERINPIXELS + 15) & ~15; |
michael@0 | 294 | img->d_w = yv12->y_width; |
michael@0 | 295 | img->d_h = yv12->y_height; |
michael@0 | 296 | img->x_chroma_shift = 1; |
michael@0 | 297 | img->y_chroma_shift = 1; |
michael@0 | 298 | img->planes[VPX_PLANE_Y] = yv12->y_buffer; |
michael@0 | 299 | img->planes[VPX_PLANE_U] = yv12->u_buffer; |
michael@0 | 300 | img->planes[VPX_PLANE_V] = yv12->v_buffer; |
michael@0 | 301 | img->planes[VPX_PLANE_ALPHA] = NULL; |
michael@0 | 302 | img->stride[VPX_PLANE_Y] = yv12->y_stride; |
michael@0 | 303 | img->stride[VPX_PLANE_U] = yv12->uv_stride; |
michael@0 | 304 | img->stride[VPX_PLANE_V] = yv12->uv_stride; |
michael@0 | 305 | img->stride[VPX_PLANE_ALPHA] = yv12->y_stride; |
michael@0 | 306 | img->bps = 12; |
michael@0 | 307 | img->user_priv = user_priv; |
michael@0 | 308 | img->img_data = yv12->buffer_alloc; |
michael@0 | 309 | img->img_data_owner = 0; |
michael@0 | 310 | img->self_allocd = 0; |
michael@0 | 311 | } |
michael@0 | 312 | |
michael@0 | 313 | static int |
michael@0 | 314 | update_fragments(vpx_codec_alg_priv_t *ctx, |
michael@0 | 315 | const uint8_t *data, |
michael@0 | 316 | unsigned int data_sz, |
michael@0 | 317 | vpx_codec_err_t *res) |
michael@0 | 318 | { |
michael@0 | 319 | *res = VPX_CODEC_OK; |
michael@0 | 320 | |
michael@0 | 321 | if (ctx->fragments.count == 0) |
michael@0 | 322 | { |
michael@0 | 323 | /* New frame, reset fragment pointers and sizes */ |
michael@0 | 324 | vpx_memset((void*)ctx->fragments.ptrs, 0, sizeof(ctx->fragments.ptrs)); |
michael@0 | 325 | vpx_memset(ctx->fragments.sizes, 0, sizeof(ctx->fragments.sizes)); |
michael@0 | 326 | } |
michael@0 | 327 | if (ctx->fragments.enabled && !(data == NULL && data_sz == 0)) |
michael@0 | 328 | { |
michael@0 | 329 | /* Store a pointer to this fragment and return. We haven't |
michael@0 | 330 | * received the complete frame yet, so we will wait with decoding. |
michael@0 | 331 | */ |
michael@0 | 332 | ctx->fragments.ptrs[ctx->fragments.count] = data; |
michael@0 | 333 | ctx->fragments.sizes[ctx->fragments.count] = data_sz; |
michael@0 | 334 | ctx->fragments.count++; |
michael@0 | 335 | if (ctx->fragments.count > (1 << EIGHT_PARTITION) + 1) |
michael@0 | 336 | { |
michael@0 | 337 | ctx->fragments.count = 0; |
michael@0 | 338 | *res = VPX_CODEC_INVALID_PARAM; |
michael@0 | 339 | return -1; |
michael@0 | 340 | } |
michael@0 | 341 | return 0; |
michael@0 | 342 | } |
michael@0 | 343 | |
michael@0 | 344 | if (!ctx->fragments.enabled) |
michael@0 | 345 | { |
michael@0 | 346 | ctx->fragments.ptrs[0] = data; |
michael@0 | 347 | ctx->fragments.sizes[0] = data_sz; |
michael@0 | 348 | ctx->fragments.count = 1; |
michael@0 | 349 | } |
michael@0 | 350 | |
michael@0 | 351 | return 1; |
michael@0 | 352 | } |
michael@0 | 353 | |
/* Decode entry point.
 *
 * Accumulates input fragments (when enabled), peeks the stream header,
 * performs deferred allocations and one-time decoder-core creation,
 * handles mid-stream resolution changes, and finally feeds the frame to
 * vp8dx_receive_compressed_data(). Decoded images are retrieved later
 * through vp8_get_frame().
 *
 * \param[in,out] ctx       Algorithm-private context.
 * \param[in]     data      Compressed data (or one fragment of it).
 * \param[in]     data_sz   Size of data in bytes.
 * \param[in]     user_priv Caller data echoed back on the output image.
 * \param[in]     deadline  Decode deadline, passed through to the core.
 * \return VPX_CODEC_OK or an error code.
 */
static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data,
                                  unsigned int data_sz,
                                  void *user_priv,
                                  long deadline)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    unsigned int resolution_change = 0;
    unsigned int w, h;

    /* Update the input fragment data; returns <= 0 while the frame is
     * still incomplete or on fragment error. */
    if(update_fragments(ctx, data, data_sz, &res) <= 0)
        return res;

    /* Determine the stream parameters. Note that we rely on peek_si to
     * validate that we have a buffer that does not wrap around the top
     * of the heap.
     */
    w = ctx->si.w;
    h = ctx->si.h;

    res = vp8_peek_si_internal(ctx->fragments.ptrs[0], ctx->fragments.sizes[0],
                               &ctx->si, ctx->decrypt_cb, ctx->decrypt_state);

    if((res == VPX_CODEC_UNSUP_BITSTREAM) && !ctx->si.is_kf)
    {
        /* the peek function returns an error for non keyframes, however for
         * this case, it is not an error */
        res = VPX_CODEC_OK;
    }

    /* A stream must begin with a keyframe. */
    if(!ctx->decoder_init && !ctx->si.is_kf)
        res = VPX_CODEC_UNSUP_BITSTREAM;

    if ((ctx->si.h != h) || (ctx->si.w != w))
        resolution_change = 1;

    /* Perform deferred allocations, if required */
    if (!res && ctx->defer_alloc)
    {
        int i;

        /* Segment 0 (ALG_PRIV) was allocated in vp8_init; size and
         * allocate the remaining segments now that w/h are known. */
        for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = ctx->si.w;
            cfg.h = ctx->si.h;
            ctx->mmaps[i].id = vp8_mem_req_segs[i].id;
            ctx->mmaps[i].sz = vp8_mem_req_segs[i].sz;
            ctx->mmaps[i].align = vp8_mem_req_segs[i].align;
            ctx->mmaps[i].flags = vp8_mem_req_segs[i].flags;

            if (!ctx->mmaps[i].sz)
                ctx->mmaps[i].sz = vp8_mem_req_segs[i].calc_sz(&cfg,
                                                               ctx->base.init_flags);

            res = vpx_mmap_alloc(&ctx->mmaps[i]);
        }

        if (!res)
            vp8_finalize_mmaps(ctx);

        ctx->defer_alloc = 0;
    }

    /* Initialize the decoder instance on the first frame*/
    if (!res && !ctx->decoder_init)
    {
        res = vpx_validate_mmaps(&ctx->si, ctx->mmaps,
                                 vp8_mem_req_segs, NELEMENTS(vp8_mem_req_segs),
                                 ctx->base.init_flags);

        if (!res)
        {
            VP8D_CONFIG oxcf;

            oxcf.Width = ctx->si.w;
            oxcf.Height = ctx->si.h;
            /* NOTE(review): 9 appears to be a fixed default for the
             * decoder config version — confirm against VP8D_CONFIG. */
            oxcf.Version = 9;
            oxcf.postprocess = 0;
            oxcf.max_threads = ctx->cfg.threads;
            oxcf.error_concealment =
                    (ctx->base.init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT);

            /* If postprocessing was enabled by the application and a
             * configuration has not been provided, default it.
             */
            if (!ctx->postproc_cfg_set
                && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
            {
                ctx->postproc_cfg.post_proc_flag =
                    VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE;
                ctx->postproc_cfg.deblocking_level = 4;
                ctx->postproc_cfg.noise_level = 0;
            }

            res = vp8_create_decoder_instances(&ctx->yv12_frame_buffers, &oxcf);
            ctx->yv12_frame_buffers.pbi[0]->decrypt_cb = ctx->decrypt_cb;
            ctx->yv12_frame_buffers.pbi[0]->decrypt_state = ctx->decrypt_state;
        }

        ctx->decoder_init = 1;
    }

    if (!res)
    {
        VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
        if(resolution_change)
        {
            VP8_COMMON *const pc = & pbi->common;
            MACROBLOCKD *const xd = & pbi->mb;
#if CONFIG_MULTITHREAD
            int i;
#endif
            pc->Width = ctx->si.w;
            pc->Height = ctx->si.h;
            {
                int prev_mb_rows = pc->mb_rows;

                /* vpx_internal_error() longjmps back here on failure. */
                if (setjmp(pbi->common.error.jmp))
                {
                    pbi->common.error.setjmp = 0;
                    /* same return value as used in vp8dx_receive_compressed_data */
                    return -1;
                }

                pbi->common.error.setjmp = 1;

                /* Fall back to the previous dimensions on bad values. */
                if (pc->Width <= 0)
                {
                    pc->Width = w;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame width");
                }

                if (pc->Height <= 0)
                {
                    pc->Height = h;
                    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                                       "Invalid frame height");
                }

                if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height))
                    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                       "Failed to allocate frame buffers");

                xd->pre = pc->yv12_fb[pc->lst_fb_idx];
                xd->dst = pc->yv12_fb[pc->new_fb_idx];

#if CONFIG_MULTITHREAD
                for (i = 0; i < pbi->allocated_decoding_thread_count; i++)
                {
                    pbi->mb_row_di[i].mbd.dst = pc->yv12_fb[pc->new_fb_idx];
                    vp8_build_block_doffsets(&pbi->mb_row_di[i].mbd);
                }
#endif
                vp8_build_block_doffsets(&pbi->mb);

                /* allocate memory for last frame MODE_INFO array */
#if CONFIG_ERROR_CONCEALMENT

                if (pbi->ec_enabled)
                {
                    /* old prev_mip was released by vp8_de_alloc_frame_buffers()
                     * called in vp8_alloc_frame_buffers() */
                    pc->prev_mip = vpx_calloc(
                                       (pc->mb_cols + 1) * (pc->mb_rows + 1),
                                       sizeof(MODE_INFO));

                    if (!pc->prev_mip)
                    {
                        vp8_de_alloc_frame_buffers(pc);
                        /* NOTE(review): adjacent string literals concatenate
                         * without a space ("...allocatelast frame..."). */
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                           "Failed to allocate"
                                           "last frame MODE_INFO array");
                    }

                    pc->prev_mi = pc->prev_mip + pc->mode_info_stride + 1;

                    if (vp8_alloc_overlap_lists(pbi))
                        vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                                           "Failed to allocate overlap lists "
                                           "for error concealment");
                }

#endif

#if CONFIG_MULTITHREAD
                if (pbi->b_multithreaded_rd)
                    vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
#else
                (void)prev_mb_rows;
#endif
            }

            pbi->common.error.setjmp = 0;

            /* required to get past the first get_free_fb() call */
            pbi->common.fb_idx_ref_cnt[0] = 0;
        }

        /* update the pbi fragment data */
        pbi->fragments = ctx->fragments;

        ctx->user_priv = user_priv;
        if (vp8dx_receive_compressed_data(pbi, data_sz, data, deadline))
        {
            res = update_error_state(ctx, &pbi->common.error);
        }

        /* get ready for the next series of fragments */
        ctx->fragments.count = 0;
    }

    return res;
}
michael@0 | 572 | |
/* Return the next decoded frame for the most recent vp8_decode() call,
 * or NULL when no (further) frame is available.
 *
 * \param[in,out] ctx  Algorithm-private context; ctx->img is reused as
 *                     the returned image wrapper.
 * \param[in,out] iter Iterator; acts as a flip-flop (see below).
 */
static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t *ctx,
                                  vpx_codec_iter_t *iter)
{
    vpx_image_t *img = NULL;

    /* iter acts as a flip flop, so an image is only returned on the first
     * call to get_frame.
     */
    if (!(*iter) && ctx->yv12_frame_buffers.pbi[0])
    {
        YV12_BUFFER_CONFIG sd;
        int64_t time_stamp = 0, time_end_stamp = 0;
        vp8_ppflags_t flags = {0};

        /* Assemble postprocessing flags only when the app enabled it. */
        if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        {
            flags.post_proc_flag= ctx->postproc_cfg.post_proc_flag
#if CONFIG_POSTPROC_VISUALIZER

                                  | ((ctx->dbg_color_ref_frame_flag != 0) ? VP8D_DEBUG_CLR_FRM_REF_BLKS : 0)
                                  | ((ctx->dbg_color_mb_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                  | ((ctx->dbg_color_b_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                  | ((ctx->dbg_display_mv_flag != 0) ? VP8D_DEBUG_DRAW_MV : 0)
#endif
                                  ;
            flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
            flags.noise_level = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
            flags.display_ref_frame_flag= ctx->dbg_color_ref_frame_flag;
            flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag;
            flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag;
            flags.display_mv_flag = ctx->dbg_display_mv_flag;
#endif
        }

        /* 0 == success: a raw frame is available. */
        if (0 == vp8dx_get_raw_frame(ctx->yv12_frame_buffers.pbi[0], &sd,
                                     &time_stamp, &time_end_stamp, &flags))
        {
            /* Wrap the decoder's buffer; no pixel copy is made. */
            yuvconfig2image(&ctx->img, &sd, ctx->user_priv);

            img = &ctx->img;
            *iter = img;
        }
    }

    return img;
}
michael@0 | 620 | |
michael@0 | 621 | |
/* XMA mode: report the next memory segment the application must
 * allocate. *iter tracks the current position in vp8_mem_req_segs
 * (NULL means start from the beginning).
 *
 * \return VPX_CODEC_OK with mmap filled in, or VPX_CODEC_LIST_END when
 *         no segments remain.
 */
static
vpx_codec_err_t vp8_xma_get_mmap(const vpx_codec_ctx_t *ctx,
                                 vpx_codec_mmap_t *mmap,
                                 vpx_codec_iter_t *iter)
{
    vpx_codec_err_t res;
    const mem_req_t *seg_iter = *iter;

    /* Get address of next segment request */
    do
    {
        if (!seg_iter)
            seg_iter = vp8_mem_req_segs;
        else if (seg_iter->id != VP8_SEG_MAX)
            seg_iter++;

        *iter = (vpx_codec_iter_t)seg_iter;

        if (seg_iter->id != VP8_SEG_MAX)
        {
            mmap->id = seg_iter->id;
            mmap->sz = seg_iter->sz;
            mmap->align = seg_iter->align;
            mmap->flags = seg_iter->flags;

            /* A zero static size means the size depends on the decoder
             * configuration; compute it now. */
            if (!seg_iter->sz)
                mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

            res = VPX_CODEC_OK;
        }
        else
            res = VPX_CODEC_LIST_END;
    }
    /* Skip zero-sized segments. NOTE(review): when the list ends on the
     * first iteration, mmap->sz is read before this function has written
     * it — presumably callers pass a zeroed mmap; confirm. */
    while (!mmap->sz && res != VPX_CODEC_LIST_END);

    return res;
}
michael@0 | 659 | |
/* XMA mode: accept an application-allocated memory segment.
 *
 * The first segment must be VP8_SEG_ALG_PRIV, since it backs the private
 * context itself. Once every segment has a base pointer, the normal
 * init path is run to finish construction.
 *
 * \return VPX_CODEC_OK on acceptance, VPX_CODEC_MEM_ERROR if the
 *         segment could not be placed.
 */
static vpx_codec_err_t vp8_xma_set_mmap(vpx_codec_ctx_t *ctx,
                                        const vpx_codec_mmap_t *mmap)
{
    vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
    int i, done;

    if (!ctx->priv)
    {
        if (mmap->id == VP8_SEG_ALG_PRIV)
        {
            if (!ctx->priv)
            {
                /* Bootstrap the private context inside this segment. */
                vp8_init_ctx(ctx, mmap);
                res = VPX_CODEC_OK;
            }
        }
    }

    done = 1;

    if (!res && ctx->priv->alg_priv)
    {
        /* Place the segment in its slot, but never overwrite one that
         * was already provided. */
        for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        {
            if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
                if (!ctx->priv->alg_priv->mmaps[i].base)
                {
                    ctx->priv->alg_priv->mmaps[i] = *mmap;
                    res = VPX_CODEC_OK;
                }

            /* Track whether every segment has now been supplied. */
            done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
        }
    }

    if (done && !res)
    {
        /* All segments present: finalize and run the normal init. */
        vp8_finalize_mmaps(ctx->priv->alg_priv);
        res = ctx->iface->init(ctx, NULL);
    }

    return res;
}
michael@0 | 703 | |
michael@0 | 704 | static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img, |
michael@0 | 705 | YV12_BUFFER_CONFIG *yv12) |
michael@0 | 706 | { |
michael@0 | 707 | vpx_codec_err_t res = VPX_CODEC_OK; |
michael@0 | 708 | yv12->y_buffer = img->planes[VPX_PLANE_Y]; |
michael@0 | 709 | yv12->u_buffer = img->planes[VPX_PLANE_U]; |
michael@0 | 710 | yv12->v_buffer = img->planes[VPX_PLANE_V]; |
michael@0 | 711 | |
michael@0 | 712 | yv12->y_crop_width = img->d_w; |
michael@0 | 713 | yv12->y_crop_height = img->d_h; |
michael@0 | 714 | yv12->y_width = img->d_w; |
michael@0 | 715 | yv12->y_height = img->d_h; |
michael@0 | 716 | yv12->uv_width = yv12->y_width / 2; |
michael@0 | 717 | yv12->uv_height = yv12->y_height / 2; |
michael@0 | 718 | |
michael@0 | 719 | yv12->y_stride = img->stride[VPX_PLANE_Y]; |
michael@0 | 720 | yv12->uv_stride = img->stride[VPX_PLANE_U]; |
michael@0 | 721 | |
michael@0 | 722 | yv12->border = (img->stride[VPX_PLANE_Y] - img->d_w) / 2; |
michael@0 | 723 | return res; |
michael@0 | 724 | } |
michael@0 | 725 | |
michael@0 | 726 | |
michael@0 | 727 | static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx, |
michael@0 | 728 | int ctr_id, |
michael@0 | 729 | va_list args) |
michael@0 | 730 | { |
michael@0 | 731 | |
michael@0 | 732 | vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *); |
michael@0 | 733 | |
michael@0 | 734 | if (data && !ctx->yv12_frame_buffers.use_frame_threads) |
michael@0 | 735 | { |
michael@0 | 736 | vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data; |
michael@0 | 737 | YV12_BUFFER_CONFIG sd; |
michael@0 | 738 | |
michael@0 | 739 | image2yuvconfig(&frame->img, &sd); |
michael@0 | 740 | |
michael@0 | 741 | return vp8dx_set_reference(ctx->yv12_frame_buffers.pbi[0], |
michael@0 | 742 | frame->frame_type, &sd); |
michael@0 | 743 | } |
michael@0 | 744 | else |
michael@0 | 745 | return VPX_CODEC_INVALID_PARAM; |
michael@0 | 746 | |
michael@0 | 747 | } |
michael@0 | 748 | |
michael@0 | 749 | static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx, |
michael@0 | 750 | int ctr_id, |
michael@0 | 751 | va_list args) |
michael@0 | 752 | { |
michael@0 | 753 | |
michael@0 | 754 | vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *); |
michael@0 | 755 | |
michael@0 | 756 | if (data && !ctx->yv12_frame_buffers.use_frame_threads) |
michael@0 | 757 | { |
michael@0 | 758 | vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data; |
michael@0 | 759 | YV12_BUFFER_CONFIG sd; |
michael@0 | 760 | |
michael@0 | 761 | image2yuvconfig(&frame->img, &sd); |
michael@0 | 762 | |
michael@0 | 763 | return vp8dx_get_reference(ctx->yv12_frame_buffers.pbi[0], |
michael@0 | 764 | frame->frame_type, &sd); |
michael@0 | 765 | } |
michael@0 | 766 | else |
michael@0 | 767 | return VPX_CODEC_INVALID_PARAM; |
michael@0 | 768 | |
michael@0 | 769 | } |
michael@0 | 770 | |
michael@0 | 771 | static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx, |
michael@0 | 772 | int ctr_id, |
michael@0 | 773 | va_list args) |
michael@0 | 774 | { |
michael@0 | 775 | #if CONFIG_POSTPROC |
michael@0 | 776 | vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *); |
michael@0 | 777 | |
michael@0 | 778 | if (data) |
michael@0 | 779 | { |
michael@0 | 780 | ctx->postproc_cfg_set = 1; |
michael@0 | 781 | ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data); |
michael@0 | 782 | return VPX_CODEC_OK; |
michael@0 | 783 | } |
michael@0 | 784 | else |
michael@0 | 785 | return VPX_CODEC_INVALID_PARAM; |
michael@0 | 786 | |
michael@0 | 787 | #else |
michael@0 | 788 | return VPX_CODEC_INCAPABLE; |
michael@0 | 789 | #endif |
michael@0 | 790 | } |
michael@0 | 791 | |
michael@0 | 792 | static vpx_codec_err_t vp8_set_dbg_options(vpx_codec_alg_priv_t *ctx, |
michael@0 | 793 | int ctrl_id, |
michael@0 | 794 | va_list args) |
michael@0 | 795 | { |
michael@0 | 796 | #if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC |
michael@0 | 797 | int data = va_arg(args, int); |
michael@0 | 798 | |
michael@0 | 799 | #define MAP(id, var) case id: var = data; break; |
michael@0 | 800 | |
michael@0 | 801 | switch (ctrl_id) |
michael@0 | 802 | { |
michael@0 | 803 | MAP (VP8_SET_DBG_COLOR_REF_FRAME, ctx->dbg_color_ref_frame_flag); |
michael@0 | 804 | MAP (VP8_SET_DBG_COLOR_MB_MODES, ctx->dbg_color_mb_modes_flag); |
michael@0 | 805 | MAP (VP8_SET_DBG_COLOR_B_MODES, ctx->dbg_color_b_modes_flag); |
michael@0 | 806 | MAP (VP8_SET_DBG_DISPLAY_MV, ctx->dbg_display_mv_flag); |
michael@0 | 807 | } |
michael@0 | 808 | |
michael@0 | 809 | return VPX_CODEC_OK; |
michael@0 | 810 | #else |
michael@0 | 811 | return VPX_CODEC_INCAPABLE; |
michael@0 | 812 | #endif |
michael@0 | 813 | } |
michael@0 | 814 | |
michael@0 | 815 | static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx, |
michael@0 | 816 | int ctrl_id, |
michael@0 | 817 | va_list args) |
michael@0 | 818 | { |
michael@0 | 819 | int *update_info = va_arg(args, int *); |
michael@0 | 820 | |
michael@0 | 821 | if (update_info && !ctx->yv12_frame_buffers.use_frame_threads) |
michael@0 | 822 | { |
michael@0 | 823 | VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0]; |
michael@0 | 824 | |
michael@0 | 825 | *update_info = pbi->common.refresh_alt_ref_frame * (int) VP8_ALTR_FRAME |
michael@0 | 826 | + pbi->common.refresh_golden_frame * (int) VP8_GOLD_FRAME |
michael@0 | 827 | + pbi->common.refresh_last_frame * (int) VP8_LAST_FRAME; |
michael@0 | 828 | |
michael@0 | 829 | return VPX_CODEC_OK; |
michael@0 | 830 | } |
michael@0 | 831 | else |
michael@0 | 832 | return VPX_CODEC_INVALID_PARAM; |
michael@0 | 833 | } |
michael@0 | 834 | |
michael@0 | 835 | extern int vp8dx_references_buffer( VP8_COMMON *oci, int ref_frame ); |
michael@0 | 836 | static vpx_codec_err_t vp8_get_last_ref_frame(vpx_codec_alg_priv_t *ctx, |
michael@0 | 837 | int ctrl_id, |
michael@0 | 838 | va_list args) |
michael@0 | 839 | { |
michael@0 | 840 | int *ref_info = va_arg(args, int *); |
michael@0 | 841 | |
michael@0 | 842 | if (ref_info && !ctx->yv12_frame_buffers.use_frame_threads) |
michael@0 | 843 | { |
michael@0 | 844 | VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0]; |
michael@0 | 845 | VP8_COMMON *oci = &pbi->common; |
michael@0 | 846 | *ref_info = |
michael@0 | 847 | (vp8dx_references_buffer( oci, ALTREF_FRAME )?VP8_ALTR_FRAME:0) | |
michael@0 | 848 | (vp8dx_references_buffer( oci, GOLDEN_FRAME )?VP8_GOLD_FRAME:0) | |
michael@0 | 849 | (vp8dx_references_buffer( oci, LAST_FRAME )?VP8_LAST_FRAME:0); |
michael@0 | 850 | |
michael@0 | 851 | return VPX_CODEC_OK; |
michael@0 | 852 | } |
michael@0 | 853 | else |
michael@0 | 854 | return VPX_CODEC_INVALID_PARAM; |
michael@0 | 855 | } |
michael@0 | 856 | |
michael@0 | 857 | static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx, |
michael@0 | 858 | int ctrl_id, |
michael@0 | 859 | va_list args) |
michael@0 | 860 | { |
michael@0 | 861 | |
michael@0 | 862 | int *corrupted = va_arg(args, int *); |
michael@0 | 863 | VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0]; |
michael@0 | 864 | |
michael@0 | 865 | if (corrupted && pbi) |
michael@0 | 866 | { |
michael@0 | 867 | *corrupted = pbi->common.frame_to_show->corrupted; |
michael@0 | 868 | |
michael@0 | 869 | return VPX_CODEC_OK; |
michael@0 | 870 | } |
michael@0 | 871 | else |
michael@0 | 872 | return VPX_CODEC_INVALID_PARAM; |
michael@0 | 873 | |
michael@0 | 874 | } |
michael@0 | 875 | |
michael@0 | 876 | static vpx_codec_err_t vp8_set_decryptor(vpx_codec_alg_priv_t *ctx, |
michael@0 | 877 | int ctrl_id, |
michael@0 | 878 | va_list args) |
michael@0 | 879 | { |
michael@0 | 880 | vp8_decrypt_init *init = va_arg(args, vp8_decrypt_init *); |
michael@0 | 881 | |
michael@0 | 882 | if (init) |
michael@0 | 883 | { |
michael@0 | 884 | ctx->decrypt_cb = init->decrypt_cb; |
michael@0 | 885 | ctx->decrypt_state = init->decrypt_state; |
michael@0 | 886 | } |
michael@0 | 887 | else |
michael@0 | 888 | { |
michael@0 | 889 | ctx->decrypt_cb = NULL; |
michael@0 | 890 | ctx->decrypt_state = NULL; |
michael@0 | 891 | } |
michael@0 | 892 | return VPX_CODEC_OK; |
michael@0 | 893 | } |
michael@0 | 894 | |
/* Control-ID -> handler dispatch table consumed by the generic
 * vpx_codec_control() machinery.  Terminated by the {-1, NULL} sentinel;
 * entries are searched in order, and all ids here are unique. */
vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] =
{
    {VP8_SET_REFERENCE,             vp8_set_reference},
    {VP8_COPY_REFERENCE,            vp8_get_reference},
    {VP8_SET_POSTPROC,              vp8_set_postproc},
    {VP8_SET_DBG_COLOR_REF_FRAME,   vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_MB_MODES,    vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_B_MODES,     vp8_set_dbg_options},
    {VP8_SET_DBG_DISPLAY_MV,        vp8_set_dbg_options},
    {VP8D_GET_LAST_REF_UPDATES,     vp8_get_last_ref_updates},
    {VP8D_GET_FRAME_CORRUPTED,      vp8_get_frame_corrupted},
    {VP8D_GET_LAST_REF_USED,        vp8_get_last_ref_frame},
    {VP8D_SET_DECRYPTOR,            vp8_set_decryptor},
    { -1, NULL},
};
michael@0 | 910 | |
michael@0 | 911 | |
/* Public decoder interface definition.  The CODEC_INTERFACE macro emits
 * the exported vpx_codec_iface_t; field order below must match
 * vpx_codec_iface's layout exactly (it is positional, ABI-critical). */
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp8_dx) =
{
    "WebM Project VP8 Decoder" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    /* Capabilities: decode, plus postproc / error concealment when the
     * corresponding CONFIG_* options were compiled in. */
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC | VP8_CAP_ERROR_CONCEALMENT |
    VPX_CODEC_CAP_INPUT_FRAGMENTS,
    /* vpx_codec_caps_t          caps; */
    vp8_init,         /* vpx_codec_init_fn_t       init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t    destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t  *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t   get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t   set_mmap; */
    {
        vp8_peek_si,      /* vpx_codec_peek_si_fn_t    peek_si; */
        vp8_get_si,       /* vpx_codec_get_si_fn_t     get_si; */
        vp8_decode,       /* vpx_codec_decode_fn_t     decode; */
        vp8_get_frame,    /* vpx_codec_frame_get_fn_t  frame_get; */
    },
    { /* encoder functions — this is a decoder-only interface */
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED
    }
};