media/libvpx/vp8/decoder/decodemv.c

Thu, 15 Jan 2015 15:59:08 +0100

author
Michael Schloh von Bennewitz <michael@schloh.com>
date
Thu, 15 Jan 2015 15:59:08 +0100
branch
TOR_BUG_9701
changeset 10
ac0c01689b40
permissions
-rw-r--r--

Implement a real Private Browsing Mode condition by changing the API/ABI;
This solves Tor bug #9701, complying with disk avoidance documented in
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.

michael@0 1 /*
michael@0 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
michael@0 3 *
michael@0 4 * Use of this source code is governed by a BSD-style license
michael@0 5 * that can be found in the LICENSE file in the root of the source
michael@0 6 * tree. An additional intellectual property rights grant can be found
michael@0 7 * in the file PATENTS. All contributing project authors may
michael@0 8 * be found in the AUTHORS file in the root of the source tree.
michael@0 9 */
michael@0 10
michael@0 11
michael@0 12 #include "treereader.h"
michael@0 13 #include "vp8/common/entropymv.h"
michael@0 14 #include "vp8/common/entropymode.h"
michael@0 15 #include "onyxd_int.h"
michael@0 16 #include "vp8/common/findnearmv.h"
michael@0 17
michael@0 18 #if CONFIG_DEBUG
michael@0 19 #include <assert.h>
michael@0 20 #endif
michael@0 21 static B_PREDICTION_MODE read_bmode(vp8_reader *bc, const vp8_prob *p)
michael@0 22 {
michael@0 23 const int i = vp8_treed_read(bc, vp8_bmode_tree, p);
michael@0 24
michael@0 25 return (B_PREDICTION_MODE)i;
michael@0 26 }
michael@0 27
michael@0 28 static MB_PREDICTION_MODE read_ymode(vp8_reader *bc, const vp8_prob *p)
michael@0 29 {
michael@0 30 const int i = vp8_treed_read(bc, vp8_ymode_tree, p);
michael@0 31
michael@0 32 return (MB_PREDICTION_MODE)i;
michael@0 33 }
michael@0 34
michael@0 35 static MB_PREDICTION_MODE read_kf_ymode(vp8_reader *bc, const vp8_prob *p)
michael@0 36 {
michael@0 37 const int i = vp8_treed_read(bc, vp8_kf_ymode_tree, p);
michael@0 38
michael@0 39 return (MB_PREDICTION_MODE)i;
michael@0 40 }
michael@0 41
michael@0 42 static MB_PREDICTION_MODE read_uv_mode(vp8_reader *bc, const vp8_prob *p)
michael@0 43 {
michael@0 44 const int i = vp8_treed_read(bc, vp8_uv_mode_tree, p);
michael@0 45
michael@0 46 return (MB_PREDICTION_MODE)i;
michael@0 47 }
michael@0 48
michael@0 49 static void read_kf_modes(VP8D_COMP *pbi, MODE_INFO *mi)
michael@0 50 {
michael@0 51 vp8_reader *const bc = & pbi->mbc[8];
michael@0 52 const int mis = pbi->common.mode_info_stride;
michael@0 53
michael@0 54 mi->mbmi.ref_frame = INTRA_FRAME;
michael@0 55 mi->mbmi.mode = read_kf_ymode(bc, vp8_kf_ymode_prob);
michael@0 56
michael@0 57 if (mi->mbmi.mode == B_PRED)
michael@0 58 {
michael@0 59 int i = 0;
michael@0 60 mi->mbmi.is_4x4 = 1;
michael@0 61
michael@0 62 do
michael@0 63 {
michael@0 64 const B_PREDICTION_MODE A = above_block_mode(mi, i, mis);
michael@0 65 const B_PREDICTION_MODE L = left_block_mode(mi, i);
michael@0 66
michael@0 67 mi->bmi[i].as_mode =
michael@0 68 read_bmode(bc, vp8_kf_bmode_prob [A] [L]);
michael@0 69 }
michael@0 70 while (++i < 16);
michael@0 71 }
michael@0 72
michael@0 73 mi->mbmi.uv_mode = read_uv_mode(bc, vp8_kf_uv_mode_prob);
michael@0 74 }
michael@0 75
/* Decode one signed motion-vector component from the bitstream.
 * mvc is the probability context for this component (row or col); its
 * probabilities are indexed as a flat vp8_prob array (mvpis_short,
 * MVPsign, MVPshort, MVPbits...).  Returns the signed value; the caller
 * is responsible for any scaling. */
static int read_mvcomponent(vp8_reader *r, const MV_CONTEXT *mvc)
{
    const vp8_prob *const p = (const vp8_prob *) mvc;
    int x = 0;

    if (vp8_read(r, p [mvpis_short])) /* Large */
    {
        int i = 0;

        /* Low 3 magnitude bits, least significant first. */
        do
        {
            x += vp8_read(r, p [MVPbits + i]) << i;
        }
        while (++i < 3);

        i = mvlong_width - 1; /* Skip bit 3, which is sometimes implicit */

        /* High magnitude bits, most significant first, down to bit 4. */
        do
        {
            x += vp8_read(r, p [MVPbits + i]) << i;
        }
        while (--i > 3);

        /* Bit 3 is implicitly set when all higher bits are zero;
         * otherwise it is read explicitly. */
        if (!(x & 0xFFF0) || vp8_read(r, p [MVPbits + 3]))
            x += 8;
    }
    else /* small */
        x = vp8_treed_read(r, vp8_small_mvtree, p + MVPshort);

    /* Sign bit; a zero magnitude carries no sign. */
    if (x && vp8_read(r, p [MVPsign]))
        x = -x;

    return x;
}
michael@0 110
michael@0 111 static void read_mv(vp8_reader *r, MV *mv, const MV_CONTEXT *mvc)
michael@0 112 {
michael@0 113 mv->row = (short)(read_mvcomponent(r, mvc) * 2);
michael@0 114 mv->col = (short)(read_mvcomponent(r, ++mvc) * 2);
michael@0 115 }
michael@0 116
michael@0 117
michael@0 118 static void read_mvcontexts(vp8_reader *bc, MV_CONTEXT *mvc)
michael@0 119 {
michael@0 120 int i = 0;
michael@0 121
michael@0 122 do
michael@0 123 {
michael@0 124 const vp8_prob *up = vp8_mv_update_probs[i].prob;
michael@0 125 vp8_prob *p = (vp8_prob *)(mvc + i);
michael@0 126 vp8_prob *const pstop = p + MVPcount;
michael@0 127
michael@0 128 do
michael@0 129 {
michael@0 130 if (vp8_read(bc, *up++))
michael@0 131 {
michael@0 132 const vp8_prob x = (vp8_prob)vp8_read_literal(bc, 7);
michael@0 133
michael@0 134 *p = x ? x << 1 : 1;
michael@0 135 }
michael@0 136 }
michael@0 137 while (++p < pstop);
michael@0 138 }
michael@0 139 while (++i < 2);
michael@0 140 }
michael@0 141
/* Number of 4x4 blocks covered by each partition, per split configuration
 * (16x8, 8x16, 8x8, 4x4). */
static const unsigned char mbsplit_fill_count[4] = {8, 8, 4, 1};
/* For each split configuration, the 4x4 block indices grouped partition by
 * partition; used to propagate a decoded partition MV to all of its
 * constituent 4x4 blocks. */
static const unsigned char mbsplit_fill_offset[4][16] = {
    { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15},
    { 0, 1, 4, 5, 8, 9, 12, 13, 2, 3, 6, 7, 10, 11, 14, 15},
    { 0, 1, 4, 5, 2, 3, 6, 7, 8, 9, 12, 13, 10, 11, 14, 15},
    { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
};
michael@0 149
michael@0 150
michael@0 151 static void mb_mode_mv_init(VP8D_COMP *pbi)
michael@0 152 {
michael@0 153 vp8_reader *const bc = & pbi->mbc[8];
michael@0 154 MV_CONTEXT *const mvc = pbi->common.fc.mvc;
michael@0 155
michael@0 156 #if CONFIG_ERROR_CONCEALMENT
michael@0 157 /* Default is that no macroblock is corrupt, therefore we initialize
michael@0 158 * mvs_corrupt_from_mb to something very big, which we can be sure is
michael@0 159 * outside the frame. */
michael@0 160 pbi->mvs_corrupt_from_mb = UINT_MAX;
michael@0 161 #endif
michael@0 162 /* Read the mb_no_coeff_skip flag */
michael@0 163 pbi->common.mb_no_coeff_skip = (int)vp8_read_bit(bc);
michael@0 164
michael@0 165 pbi->prob_skip_false = 0;
michael@0 166 if (pbi->common.mb_no_coeff_skip)
michael@0 167 pbi->prob_skip_false = (vp8_prob)vp8_read_literal(bc, 8);
michael@0 168
michael@0 169 if(pbi->common.frame_type != KEY_FRAME)
michael@0 170 {
michael@0 171 pbi->prob_intra = (vp8_prob)vp8_read_literal(bc, 8);
michael@0 172 pbi->prob_last = (vp8_prob)vp8_read_literal(bc, 8);
michael@0 173 pbi->prob_gf = (vp8_prob)vp8_read_literal(bc, 8);
michael@0 174
michael@0 175 if (vp8_read_bit(bc))
michael@0 176 {
michael@0 177 int i = 0;
michael@0 178
michael@0 179 do
michael@0 180 {
michael@0 181 pbi->common.fc.ymode_prob[i] =
michael@0 182 (vp8_prob) vp8_read_literal(bc, 8);
michael@0 183 }
michael@0 184 while (++i < 4);
michael@0 185 }
michael@0 186
michael@0 187 if (vp8_read_bit(bc))
michael@0 188 {
michael@0 189 int i = 0;
michael@0 190
michael@0 191 do
michael@0 192 {
michael@0 193 pbi->common.fc.uv_mode_prob[i] =
michael@0 194 (vp8_prob) vp8_read_literal(bc, 8);
michael@0 195 }
michael@0 196 while (++i < 3);
michael@0 197 }
michael@0 198
michael@0 199 read_mvcontexts(bc, mvc);
michael@0 200 }
michael@0 201 }
michael@0 202
/* Sub-MV reference mode probabilities, indexed by the 3-bit context built
 * in get_sub_mv_ref_prob(): (above==0)<<2 | (left==0)<<1 | (left==above).
 * Some contexts alias the same probability set, hence repeated rows. */
const vp8_prob vp8_sub_mv_ref_prob3 [8][VP8_SUBMVREFS-1] =
{
    { 147, 136, 18 },   /* SUBMVREF_NORMAL          */
    { 223, 1  , 34 },   /* SUBMVREF_LEFT_ABOVE_SAME */
    { 106, 145, 1  },   /* SUBMVREF_LEFT_ZED        */
    { 208, 1  , 1  },   /* SUBMVREF_LEFT_ABOVE_ZED  */
    { 179, 121, 1  },   /* SUBMVREF_ABOVE_ZED       */
    { 223, 1  , 34 },   /* SUBMVREF_LEFT_ABOVE_SAME */
    { 179, 121, 1  },   /* SUBMVREF_ABOVE_ZED       */
    { 208, 1  , 1  }    /* SUBMVREF_LEFT_ABOVE_ZED  */
};
michael@0 214
michael@0 215 static
michael@0 216 const vp8_prob * get_sub_mv_ref_prob(const int left, const int above)
michael@0 217 {
michael@0 218 int lez = (left == 0);
michael@0 219 int aez = (above == 0);
michael@0 220 int lea = (left == above);
michael@0 221 const vp8_prob * prob;
michael@0 222
michael@0 223 prob = vp8_sub_mv_ref_prob3[(aez << 2) |
michael@0 224 (lez << 1) |
michael@0 225 (lea)];
michael@0 226
michael@0 227 return prob;
michael@0 228 }
michael@0 229
/* Decode the motion vectors of a SPLITMV macroblock: first the split
 * configuration, then one MV (or reuse of a neighbour MV) per partition.
 * Each decoded partition MV is immediately fanned out to all of its 4x4
 * blocks, because later partitions reference earlier ones as "left" or
 * "above".  The edge arguments bound MV clamping; best_mv is the
 * prediction added to newly coded MVs. */
static void decode_split_mv(vp8_reader *const bc, MODE_INFO *mi,
                        const MODE_INFO *left_mb, const MODE_INFO *above_mb,
                        MB_MODE_INFO *mbmi, int_mv best_mv,
                        MV_CONTEXT *const mvc, int mb_to_left_edge,
                        int mb_to_right_edge, int mb_to_top_edge,
                        int mb_to_bottom_edge)
{
    int s;      /* split configuration (16x8, 8x16, 8x8, 4x4) */
    int num_p;  /* number of partitions in the split configuration
                   (see vp8_mbsplit_count) */
    int j = 0;

    /* Decode the split configuration with hard-coded probabilities:
     * default is 4x4 (16 partitions); successive bits narrow it down. */
    s = 3;
    num_p = 16;
    if( vp8_read(bc, 110) )
    {
        s = 2;
        num_p = 4;
        if( vp8_read(bc, 111) )
        {
            s = vp8_read(bc, 150);
            num_p = 2;
        }
    }

    do  /* for each subset j */
    {
        int_mv leftmv, abovemv;
        int_mv blockmv;
        int k;  /* first block in subset j */

        const vp8_prob *prob;
        k = vp8_mbsplit_offset[s][j];

        /* Left neighbour MV of block k. */
        if (!(k & 3))
        {
            /* On L edge, get from MB to left of us */
            if(left_mb->mbmi.mode != SPLITMV)
                leftmv.as_int = left_mb->mbmi.mv.as_int;
            else
                leftmv.as_int = (left_mb->bmi + k + 4 - 1)->mv.as_int;
        }
        else
            leftmv.as_int = (mi->bmi + k - 1)->mv.as_int;

        /* Above neighbour MV of block k. */
        if (!(k >> 2))
        {
            /* On top edge, get from MB above us */
            if(above_mb->mbmi.mode != SPLITMV)
                abovemv.as_int = above_mb->mbmi.mv.as_int;
            else
                abovemv.as_int = (above_mb->bmi + k + 16 - 4)->mv.as_int;
        }
        else
            abovemv.as_int = (mi->bmi + k - 4)->mv.as_int;

        prob = get_sub_mv_ref_prob(leftmv.as_int, abovemv.as_int);

        /* Sub-MV reference tree: LEFT / ABOVE / ZERO / NEW. */
        if( vp8_read(bc, prob[0]) )
        {
            if( vp8_read(bc, prob[1]) )
            {
                blockmv.as_int = 0;
                if( vp8_read(bc, prob[2]) )
                {
                    /* NEW4X4: coded MV is a delta from best_mv. */
                    blockmv.as_mv.row = read_mvcomponent(bc, &mvc[0]) * 2;
                    blockmv.as_mv.row += best_mv.as_mv.row;
                    blockmv.as_mv.col = read_mvcomponent(bc, &mvc[1]) * 2;
                    blockmv.as_mv.col += best_mv.as_mv.col;
                }
            }
            else
            {
                blockmv.as_int = abovemv.as_int;
            }
        }
        else
        {
            blockmv.as_int = leftmv.as_int;
        }

        /* Flag whether the prediction stage must clamp this MB's MVs. */
        mbmi->need_to_clamp_mvs |= vp8_check_mv_bounds(&blockmv,
                                                  mb_to_left_edge,
                                                  mb_to_right_edge,
                                                  mb_to_top_edge,
                                                  mb_to_bottom_edge);

        {
            /* Fill (uniform) modes, mvs of jth subset.
             Must do it here because ensuing subsets can
             refer back to us via "left" or "above". */
            const unsigned char *fill_offset;
            unsigned int fill_count = mbsplit_fill_count[s];

            fill_offset = &mbsplit_fill_offset[s]
                             [(unsigned char)j * mbsplit_fill_count[s]];

            do {
                mi->bmi[ *fill_offset].mv.as_int = blockmv.as_int;
                fill_offset++;
            }while (--fill_count);
        }

    }
    while (++j < num_p);

    mbmi->partitioning = s;
}
michael@0 338
/* Decode the prediction mode, reference frame and motion vector(s) for one
 * macroblock on an inter frame.  Surveys the above, left and above-left
 * neighbours to build the NEAREST/NEAR candidate list and its counts, then
 * walks the inter mode tree (ZEROMV / NEARESTMV / NEARMV / NEWMV /
 * SPLITMV) using counts as probability contexts. */
static void read_mb_modes_mv(VP8D_COMP *pbi, MODE_INFO *mi, MB_MODE_INFO *mbmi)
{
    vp8_reader *const bc = & pbi->mbc[8];
    mbmi->ref_frame = (MV_REFERENCE_FRAME) vp8_read(bc, pbi->prob_intra);
    if (mbmi->ref_frame)    /* inter MB */
    {
        enum {CNT_INTRA, CNT_NEAREST, CNT_NEAR, CNT_SPLITMV};
        int cnt[4];
        int *cntx = cnt;
        int_mv near_mvs[4];
        int_mv *nmv = near_mvs;
        const int mis = pbi->mb.mode_info_stride;
        const MODE_INFO *above = mi - mis;
        const MODE_INFO *left = mi - 1;
        const MODE_INFO *aboveleft = above - 1;
        int *ref_frame_sign_bias = pbi->common.ref_frame_sign_bias;

        mbmi->need_to_clamp_mvs = 0;

        /* Select LAST, or GOLDEN/ALTREF via a second bit. */
        if (vp8_read(bc, pbi->prob_last))
        {
            mbmi->ref_frame =
                (MV_REFERENCE_FRAME)((int)(2 + vp8_read(bc, pbi->prob_gf)));
        }

        /* Zero accumulators */
        nmv[0].as_int = nmv[1].as_int = nmv[2].as_int = 0;
        cnt[0] = cnt[1] = cnt[2] = cnt[3] = 0;

        /* Process above: nmv/cntx advance to a new slot whenever a new
         * distinct nonzero MV is seen; weights are 2 for above/left and
         * 1 for above-left. */
        if (above->mbmi.ref_frame != INTRA_FRAME)
        {
            if (above->mbmi.mv.as_int)
            {
                (++nmv)->as_int = above->mbmi.mv.as_int;
                mv_bias(ref_frame_sign_bias[above->mbmi.ref_frame],
                        mbmi->ref_frame, nmv, ref_frame_sign_bias);
                ++cntx;
            }

            *cntx += 2;
        }

        /* Process left */
        if (left->mbmi.ref_frame != INTRA_FRAME)
        {
            if (left->mbmi.mv.as_int)
            {
                int_mv this_mv;

                this_mv.as_int = left->mbmi.mv.as_int;
                mv_bias(ref_frame_sign_bias[left->mbmi.ref_frame],
                        mbmi->ref_frame, &this_mv, ref_frame_sign_bias);

                if (this_mv.as_int != nmv->as_int)
                {
                    (++nmv)->as_int = this_mv.as_int;
                    ++cntx;
                }

                *cntx += 2;
            }
            else
                cnt[CNT_INTRA] += 2;
        }

        /* Process above left */
        if (aboveleft->mbmi.ref_frame != INTRA_FRAME)
        {
            if (aboveleft->mbmi.mv.as_int)
            {
                int_mv this_mv;

                this_mv.as_int = aboveleft->mbmi.mv.as_int;
                mv_bias(ref_frame_sign_bias[aboveleft->mbmi.ref_frame],
                        mbmi->ref_frame, &this_mv, ref_frame_sign_bias);

                if (this_mv.as_int != nmv->as_int)
                {
                    (++nmv)->as_int = this_mv.as_int;
                    ++cntx;
                }

                *cntx += 1;
            }
            else
                cnt[CNT_INTRA] += 1;
        }

        /* Mode tree: first bit distinguishes ZEROMV from the rest. */
        if( vp8_read(bc, vp8_mode_contexts [cnt[CNT_INTRA]] [0]) )
        {

            /* If we have three distinct MV's ... */
            /* See if above-left MV can be merged with NEAREST */
            cnt[CNT_NEAREST] += ( (cnt[CNT_SPLITMV] > 0) &
                (nmv->as_int == near_mvs[CNT_NEAREST].as_int));

            /* Swap near and nearest if necessary */
            if (cnt[CNT_NEAR] > cnt[CNT_NEAREST])
            {
                int tmp;
                tmp = cnt[CNT_NEAREST];
                cnt[CNT_NEAREST] = cnt[CNT_NEAR];
                cnt[CNT_NEAR] = tmp;
                tmp = near_mvs[CNT_NEAREST].as_int;
                near_mvs[CNT_NEAREST].as_int = near_mvs[CNT_NEAR].as_int;
                near_mvs[CNT_NEAR].as_int = tmp;
            }

            if( vp8_read(bc, vp8_mode_contexts [cnt[CNT_NEAREST]] [1]) )
            {

                if( vp8_read(bc, vp8_mode_contexts [cnt[CNT_NEAR]] [2]) )
                {
                    /* NEWMV or SPLITMV: need the clamping bounds and the
                     * best predictor MV. */
                    int mb_to_top_edge;
                    int mb_to_bottom_edge;
                    int mb_to_left_edge;
                    int mb_to_right_edge;
                    MV_CONTEXT *const mvc = pbi->common.fc.mvc;
                    int near_index;

                    mb_to_top_edge = pbi->mb.mb_to_top_edge;
                    mb_to_bottom_edge = pbi->mb.mb_to_bottom_edge;
                    mb_to_top_edge -= LEFT_TOP_MARGIN;
                    mb_to_bottom_edge += RIGHT_BOTTOM_MARGIN;
                    mb_to_right_edge = pbi->mb.mb_to_right_edge;
                    mb_to_right_edge += RIGHT_BOTTOM_MARGIN;
                    mb_to_left_edge = pbi->mb.mb_to_left_edge;
                    mb_to_left_edge -= LEFT_TOP_MARGIN;

                    /* Use near_mvs[0] to store the "best" MV */
                    near_index = CNT_INTRA +
                        (cnt[CNT_NEAREST] >= cnt[CNT_INTRA]);

                    vp8_clamp_mv2(&near_mvs[near_index], &pbi->mb);

                    /* SPLITMV context: how many neighbours used SPLITMV. */
                    cnt[CNT_SPLITMV] = ((above->mbmi.mode == SPLITMV)
                                        + (left->mbmi.mode == SPLITMV)) * 2
                                       + (aboveleft->mbmi.mode == SPLITMV);

                    if( vp8_read(bc, vp8_mode_contexts [cnt[CNT_SPLITMV]] [3]) )
                    {
                        decode_split_mv(bc, mi, left, above,
                                            mbmi,
                                            near_mvs[near_index],
                                            mvc, mb_to_left_edge,
                                            mb_to_right_edge,
                                            mb_to_top_edge,
                                            mb_to_bottom_edge);
                        /* MB-level MV mirrors the last sub-block's MV. */
                        mbmi->mv.as_int = mi->bmi[15].mv.as_int;
                        mbmi->mode =  SPLITMV;
                        mbmi->is_4x4 = 1;
                    }
                    else
                    {
                        int_mv *const mbmi_mv = & mbmi->mv;
                        read_mv(bc, &mbmi_mv->as_mv, (const MV_CONTEXT *) mvc);
                        mbmi_mv->as_mv.row += near_mvs[near_index].as_mv.row;
                        mbmi_mv->as_mv.col += near_mvs[near_index].as_mv.col;

                        /* Don't need to check this on NEARMV and NEARESTMV
                         * modes since those modes clamp the MV. The NEWMV mode
                         * does not, so signal to the prediction stage whether
                         * special handling may be required.
                         */
                        mbmi->need_to_clamp_mvs =
                            vp8_check_mv_bounds(mbmi_mv, mb_to_left_edge,
                                                mb_to_right_edge,
                                                mb_to_top_edge,
                                                mb_to_bottom_edge);
                        mbmi->mode =  NEWMV;
                    }
                }
                else
                {
                    mbmi->mode =  NEARMV;
                    mbmi->mv.as_int = near_mvs[CNT_NEAR].as_int;
                    vp8_clamp_mv2(&mbmi->mv, &pbi->mb);
                }
            }
            else
            {
                mbmi->mode =  NEARESTMV;
                mbmi->mv.as_int = near_mvs[CNT_NEAREST].as_int;
                vp8_clamp_mv2(&mbmi->mv, &pbi->mb);
            }
        }
        else
        {
            mbmi->mode =  ZEROMV;
            mbmi->mv.as_int = 0;
        }

#if CONFIG_ERROR_CONCEALMENT
        /* Error concealment reads per-block MVs, so replicate the MB MV
         * into every sub-block for non-SPLITMV modes. */
        if(pbi->ec_enabled && (mbmi->mode != SPLITMV))
        {
            mi->bmi[ 0].mv.as_int =
            mi->bmi[ 1].mv.as_int =
            mi->bmi[ 2].mv.as_int =
            mi->bmi[ 3].mv.as_int =
            mi->bmi[ 4].mv.as_int =
            mi->bmi[ 5].mv.as_int =
            mi->bmi[ 6].mv.as_int =
            mi->bmi[ 7].mv.as_int =
            mi->bmi[ 8].mv.as_int =
            mi->bmi[ 9].mv.as_int =
            mi->bmi[10].mv.as_int =
            mi->bmi[11].mv.as_int =
            mi->bmi[12].mv.as_int =
            mi->bmi[13].mv.as_int =
            mi->bmi[14].mv.as_int =
            mi->bmi[15].mv.as_int = mbmi->mv.as_int;
        }
#endif
    }
    else
    {
        /* required for left and above block mv */
        mbmi->mv.as_int = 0;

        /* MB is intra coded */
        if ((mbmi->mode = read_ymode(bc, pbi->common.fc.ymode_prob)) == B_PRED)
        {
            int j = 0;
            mbmi->is_4x4 = 1;
            do
            {
                mi->bmi[j].as_mode = read_bmode(bc, pbi->common.fc.bmode_prob);
            }
            while (++j < 16);
        }

        mbmi->uv_mode = read_uv_mode(bc, pbi->common.fc.uv_mode_prob);
    }

}
michael@0 575
michael@0 576 static void read_mb_features(vp8_reader *r, MB_MODE_INFO *mi, MACROBLOCKD *x)
michael@0 577 {
michael@0 578 /* Is segmentation enabled */
michael@0 579 if (x->segmentation_enabled && x->update_mb_segmentation_map)
michael@0 580 {
michael@0 581 /* If so then read the segment id. */
michael@0 582 if (vp8_read(r, x->mb_segment_tree_probs[0]))
michael@0 583 mi->segment_id =
michael@0 584 (unsigned char)(2 + vp8_read(r, x->mb_segment_tree_probs[2]));
michael@0 585 else
michael@0 586 mi->segment_id =
michael@0 587 (unsigned char)(vp8_read(r, x->mb_segment_tree_probs[1]));
michael@0 588 }
michael@0 589 }
michael@0 590
michael@0 591 static void decode_mb_mode_mvs(VP8D_COMP *pbi, MODE_INFO *mi,
michael@0 592 MB_MODE_INFO *mbmi)
michael@0 593 {
michael@0 594 /* Read the Macroblock segmentation map if it is being updated explicitly
michael@0 595 * this frame (reset to 0 above by default)
michael@0 596 * By default on a key frame reset all MBs to segment 0
michael@0 597 */
michael@0 598 if (pbi->mb.update_mb_segmentation_map)
michael@0 599 read_mb_features(&pbi->mbc[8], &mi->mbmi, &pbi->mb);
michael@0 600 else if(pbi->common.frame_type == KEY_FRAME)
michael@0 601 mi->mbmi.segment_id = 0;
michael@0 602
michael@0 603 /* Read the macroblock coeff skip flag if this feature is in use,
michael@0 604 * else default to 0 */
michael@0 605 if (pbi->common.mb_no_coeff_skip)
michael@0 606 mi->mbmi.mb_skip_coeff = vp8_read(&pbi->mbc[8], pbi->prob_skip_false);
michael@0 607 else
michael@0 608 mi->mbmi.mb_skip_coeff = 0;
michael@0 609
michael@0 610 mi->mbmi.is_4x4 = 0;
michael@0 611 if(pbi->common.frame_type == KEY_FRAME)
michael@0 612 read_kf_modes(pbi, mi);
michael@0 613 else
michael@0 614 read_mb_modes_mv(pbi, mi, &mi->mbmi);
michael@0 615
michael@0 616 }
michael@0 617
michael@0 618 void vp8_decode_mode_mvs(VP8D_COMP *pbi)
michael@0 619 {
michael@0 620 MODE_INFO *mi = pbi->common.mi;
michael@0 621 int mb_row = -1;
michael@0 622 int mb_to_right_edge_start;
michael@0 623
michael@0 624 mb_mode_mv_init(pbi);
michael@0 625
michael@0 626 pbi->mb.mb_to_top_edge = 0;
michael@0 627 pbi->mb.mb_to_bottom_edge = ((pbi->common.mb_rows - 1) * 16) << 3;
michael@0 628 mb_to_right_edge_start = ((pbi->common.mb_cols - 1) * 16) << 3;
michael@0 629
michael@0 630 while (++mb_row < pbi->common.mb_rows)
michael@0 631 {
michael@0 632 int mb_col = -1;
michael@0 633
michael@0 634 pbi->mb.mb_to_left_edge = 0;
michael@0 635 pbi->mb.mb_to_right_edge = mb_to_right_edge_start;
michael@0 636
michael@0 637 while (++mb_col < pbi->common.mb_cols)
michael@0 638 {
michael@0 639 #if CONFIG_ERROR_CONCEALMENT
michael@0 640 int mb_num = mb_row * pbi->common.mb_cols + mb_col;
michael@0 641 #endif
michael@0 642
michael@0 643 decode_mb_mode_mvs(pbi, mi, &mi->mbmi);
michael@0 644
michael@0 645 #if CONFIG_ERROR_CONCEALMENT
michael@0 646 /* look for corruption. set mvs_corrupt_from_mb to the current
michael@0 647 * mb_num if the frame is corrupt from this macroblock. */
michael@0 648 if (vp8dx_bool_error(&pbi->mbc[8]) && mb_num <
michael@0 649 (int)pbi->mvs_corrupt_from_mb)
michael@0 650 {
michael@0 651 pbi->mvs_corrupt_from_mb = mb_num;
michael@0 652 /* no need to continue since the partition is corrupt from
michael@0 653 * here on.
michael@0 654 */
michael@0 655 return;
michael@0 656 }
michael@0 657 #endif
michael@0 658
michael@0 659 pbi->mb.mb_to_left_edge -= (16 << 3);
michael@0 660 pbi->mb.mb_to_right_edge -= (16 << 3);
michael@0 661 mi++; /* next macroblock */
michael@0 662 }
michael@0 663 pbi->mb.mb_to_top_edge -= (16 << 3);
michael@0 664 pbi->mb.mb_to_bottom_edge -= (16 << 3);
michael@0 665
michael@0 666 mi++; /* skip left predictor each row */
michael@0 667 }
michael@0 668 }

mercurial