Thu, 22 Jan 2015 13:21:57 +0100
Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6
/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_findnearmv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_treecoder.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"


#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
vp9_coeff_stats tree_update_hist[TX_SIZES][BLOCK_TYPES];

extern unsigned int active_section;
#endif


#ifdef MODE_STATS
int64_t tx_count_32x32p_stats[TX_SIZE_CONTEXTS][TX_SIZES];
int64_t tx_count_16x16p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 1];
int64_t tx_count_8x8p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 2];
int64_t switchable_interp_stats[SWITCHABLE_FILTER_CONTEXTS][SWITCHABLE_FILTERS];

void init_tx_count_stats() {
  vp9_zero(tx_count_32x32p_stats);
  vp9_zero(tx_count_16x16p_stats);
  vp9_zero(tx_count_8x8p_stats);
}

void init_switchable_interp_stats() {
  vp9_zero(switchable_interp_stats);
}

static void update_tx_count_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES; j++) {
      tx_count_32x32p_stats[i][j] += cm->fc.tx_count_32x32p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 1; j++) {
      tx_count_16x16p_stats[i][j] += cm->fc.tx_count_16x16p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 2; j++) {
      tx_count_8x8p_stats[i][j] += cm->fc.tx_count_8x8p[i][j];
    }
  }
}

static void update_switchable_interp_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; ++i)
    for (j = 0; j < SWITCHABLE_FILTERS; ++j)
      switchable_interp_stats[i][j] += cm->fc.switchable_interp_count[i][j];
}

void write_tx_count_stats() {
  int i, j;
  FILE *fp = fopen("tx_count.bin", "wb");
  fwrite(tx_count_32x32p_stats, sizeof(tx_count_32x32p_stats), 1, fp);
  fwrite(tx_count_16x16p_stats, sizeof(tx_count_16x16p_stats), 1, fp);
  fwrite(tx_count_8x8p_stats, sizeof(tx_count_8x8p_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_tx_count_32x32p[TX_SIZE_CONTEXTS][TX_SIZES] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf(" { ");
    for (j = 0; j < TX_SIZES; j++) {
      printf("%"PRId64", ", tx_count_32x32p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_16x16p[TX_SIZE_CONTEXTS][TX_SIZES-1] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf(" { ");
    for (j = 0; j < TX_SIZES - 1; j++) {
      printf("%"PRId64", ", tx_count_16x16p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_8x8p[TX_SIZE_CONTEXTS][TX_SIZES-2] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf(" { ");
    for (j = 0; j < TX_SIZES - 2; j++) {
      printf("%"PRId64", ", tx_count_8x8p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}

void write_switchable_interp_stats() {
  int i, j;
  FILE *fp = fopen("switchable_interp.bin", "wb");
  fwrite(switchable_interp_stats, sizeof(switchable_interp_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_switchable_filter_count[SWITCHABLE_FILTER_CONTEXTS]"
      "[SWITCHABLE_FILTERS] = {\n");
  for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; i++) {
    printf(" { ");
    for (j = 0; j < SWITCHABLE_FILTERS; j++) {
      printf("%"PRId64", ", switchable_interp_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}
#endif

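// write_be32() stores |value| as four big-endian bytes (MSB first), e.g.
// 0x00012345 becomes { 0x00, 0x01, 0x23, 0x45 }. encode_tiles() below uses it
// to prefix each tile except the last with its coded size, so a decoder can
// locate every tile boundary without parsing the tile payloads.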
static INLINE void write_be32(uint8_t *p, int value) {
  p[0] = value >> 24;
  p[1] = value >> 16;
  p[2] = value >> 8;
  p[3] = value;
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

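// Shared helper for mode-probability updates: derive per-node branch counts
// for |tree| from the observed symbol frequencies, then for each internal
// node let vp9_cond_prob_diff_update() decide whether signalling a new
// probability saves more bits than it costs.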
static void update_mode(vp9_writer *w, int n, vp9_tree tree,
                        vp9_prob Pcur[/* n-1 */],
                        unsigned int bct[/* n-1 */][2],
                        const unsigned int num_events[/* n */]) {
  int i = 0;

  vp9_tree_probs_from_distribution(tree, bct, num_events);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &Pcur[i], bct[i]);
}

static void update_mbintra_mode_probs(VP9_COMP* const cpi,
                                      vp9_writer* const bc) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  unsigned int bct[INTRA_MODES - 1][2];

  for (j = 0; j < BLOCK_SIZE_GROUPS; j++)
    update_mode(bc, INTRA_MODES, vp9_intra_mode_tree,
                cm->fc.y_mode_prob[j], bct,
                (unsigned int *)cpi->y_mode_count[j]);
}

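// The chosen transform size is coded as a truncated unary string, capped by
// the largest size the block allows. With max_tx_size == TX_32X32 the
// mapping is TX_4X4 -> 0, TX_8X8 -> 10, TX_16X16 -> 110, TX_32X32 -> 111,
// each bit coded with its own context-dependent probability from tx_probs[].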
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip_coeff = m->mbmi.skip_coeff;
    vp9_write(w, skip_coeff, vp9_get_pred_prob_mbskip(&cpi->common, xd));
    return skip_coeff;
  }
}

void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *cm = &cpi->common;
  int k;

  for (k = 0; k < MBSKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[k], cm->counts.mbskip[k]);
}

static void write_intra_mode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_intra_mode_tree, p, vp9_intra_mode_encodings + m);
}

static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  unsigned int branch_ct[SWITCHABLE_FILTERS - 1][2];
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
    vp9_tree_probs_from_distribution(vp9_switchable_interp_tree, branch_ct,
                                     cm->counts.switchable_interp[j]);

    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_cond_prob_diff_update(w, &cm->fc.switchable_interp_prob[j][i],
                                branch_ct[i]);
  }

#ifdef MODE_STATS
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

static void update_inter_mode_probs(VP9_COMMON *cm, vp9_writer *w) {
  int i, j;

  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    unsigned int branch_ct[INTER_MODES - 1][2];
    vp9_tree_probs_from_distribution(vp9_inter_mode_tree, branch_ct,
                                     cm->counts.inter_mode[i]);

    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_cond_prob_diff_update(w, &cm->fc.inter_mode_probs[i][j],
                                branch_ct[j]);
  }
}

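// Packs one block's coefficient tokens through the arithmetic coder. Each
// token is a (value, length) bit prefix walked MSB-first down vp9_coef_tree;
// for tokens >= TWO_TOKEN the compact model probabilities are first expanded
// to the full node set with vp9_model_to_full_probs(). skip_eob_node flags
// positions that cannot hold an EOB token, letting the coder skip the
// leading tree node(s). Tokens with extra bits then code those bits under
// b->tree, followed by a raw sign bit.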
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;
    const vp9_prob *pp;
    int v = a->value;
    int n = a->len;
    vp9_prob probs[ENTROPY_NODES];

    if (t >= TWO_TOKEN) {
      vp9_model_to_full_probs(p->context_tree, probs);
      pp = probs;
    } else {
      pp = p->context_tree;
    }
    assert(pp != 0);

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    do {
      const int bb = (v >> --n) & 1;
      vp9_write(w, bb, pp[i >> 1]);
      i = vp9_coef_tree[i + bb];
    } while (n);

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;  /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(w, e & 1);
    }
    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_sb_mv_ref(vp9_writer *w, MB_PREDICTION_MODE mode,
                            const vp9_prob *p) {
  assert(is_inter_mode(mode));
  write_token(w, vp9_inter_mode_tree, p,
              &vp9_inter_mode_encodings[INTER_OFFSET(mode)]);
}


static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    treed_write(w, vp9_segment_tree, seg->tree_probs, segment_id, 3);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->comp_pred_mode == HYBRID_PREDICTION) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_pred_prob_comp_inter_inter(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
             (cm->comp_pred_mode == SINGLE_PREDICTION_ONLY));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

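// Writes the mode info for one block of a non-intra-only frame, in decoder
// parse order: segment id (optionally temporally predicted), skip flag,
// intra/inter flag, transform size, then either the intra modes or the
// reference frames, inter mode(s), interpolation filter and motion vectors.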
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME,
              vp9_get_pred_prob_intra_inter(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled, code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_sb_mv_ref(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]]
                               [INTER_OFFSET(mode)];
      }
    }

    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      write_token(bc, vp9_switchable_interp_tree,
                  cm->fc.switchable_interp_prob[ctx],
                  &vp9_switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_sb_mv_ref(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}

static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

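// The partition symbol only spends bits on choices that are actually
// possible: at the bottom (or right) frame edge only HORZ/SPLIT (or
// VERT/SPLIT) remain, so a single bool is coded with the matching tree
// probability, and when both halves fall outside the frame PARTITION_SPLIT
// is implied and nothing is written.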
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    write_token(w, vp9_partition_tree, probs, &vp9_partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    assert(p == PARTITION_SPLIT);
  }
}

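// Recursively emits one superblock: the partition is recovered from the
// stored block size via partition_lookup, written with write_partition(),
// and each resulting sub-block is either coded directly or, for
// PARTITION_SPLIT, recursed into at the next smaller square size.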
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}

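// Converts the raw token counts gathered during encoding into per-node
// branch counts and per-frame model probabilities for one transform size.
// Node 0 (the EOB decision) is special: tokens with skip_eob_node set never
// visit it (see pack_mb_tokens), so its "not EOB" count is recomputed from
// counts.eob_branch, which tracks how often the node was actually coded.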
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
          if (l >= 3 && k == 0)
            continue;
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][MAX_ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

static void build_coeff_contexts(VP9_COMP *cpi) {
  TX_SIZE t;
  for (t = TX_4X4; t <= TX_32X32; t++)
    build_tree_distribution(cpi, t);
}

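// Writes the coefficient-probability updates for one transform size. The
// strategy depends on sf.use_fast_coef_updates: mode 0 runs a dry pass to
// total the rate savings and sends a single "no update" bit when updating
// does not pay; modes 1 and 2 stream the per-node decisions directly (mode 2
// also restricts the search to half the bands and contexts), buffering
// leading zero flags until the first real update is found.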
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if any update at all is needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;

                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                          frame_branch_ct[i][j][k][l][0],
                          old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                          frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                          frame_branch_ct[i][j][k][l][0],
                          old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                          frame_branch_ct[i][j][k][l][t],
                          *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           PREV_COEF_CONTEXTS >> 1 : PREV_COEF_CONTEXTS);
      const int coef_band_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           COEF_BANDS >> 1 : COEF_BANDS);
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                            frame_branch_ct[i][j][k][l][0],
                            old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                            frame_branch_ct[i][j][k][l][t],
                            *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
#ifdef ENTROPY_STATS
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

static void update_coef_probs(VP9_COMP* const cpi, vp9_writer* const bc) {
  const TX_MODE tx_mode = cpi->common.tx_mode;

  vp9_clear_system_state();

  // Build the coefficient contexts based on counts collected in the encode loop
  build_coeff_contexts(cpi);

  update_coef_probs_common(bc, cpi, TX_4X4);

  // do not do this if not even allowed
  if (tx_mode > ONLY_4X4)
    update_coef_probs_common(bc, cpi, TX_8X8);

  if (tx_mode > ALLOW_8X8)
    update_coef_probs_common(bc, cpi, TX_16X16);

  if (tx_mode > ALLOW_16X16)
    update_coef_probs_common(bc, cpi, TX_32X32);
}

static void encode_loopfilter(struct loopfilter *lf,
                              struct vp9_write_bit_buffer *wb) {
  int i;

  // Encode the loop filter level and type
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    // Do the deltas need to be updated
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      // Send update
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
        const int delta = lf->ref_deltas[i];

        // Frame level data
        if (delta != lf->last_ref_deltas[i]) {
          lf->last_ref_deltas[i] = delta;
          vp9_wb_write_bit(wb, 1);

          assert(delta != 0);
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        } else {
          vp9_wb_write_bit(wb, 0);
        }
      }

      // Send update
      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        const int delta = lf->mode_deltas[i];
        if (delta != lf->last_mode_deltas[i]) {
          lf->last_mode_deltas[i] = delta;
          vp9_wb_write_bit(wb, 1);

          assert(delta != 0);
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        } else {
          vp9_wb_write_bit(wb, 0);
        }
      }
    }
  }
}

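// Delta-q syntax: a presence flag, then a 4-bit magnitude (so |delta_q| must
// fit in 4 bits) and a sign bit, the magnitude going out MSB first. For
// example, delta_q = -3 is coded as 1 0011 1, while delta_q = 0 is the
// single bit 0.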
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q != 0) {
    vp9_wb_write_bit(wb, 1);
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  } else {
    vp9_wb_write_bit(wb, 0);
  }
}

static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


static void encode_segmentation(VP9_COMP *cpi,
                                struct vp9_write_bit_buffer *wb) {
  int i, j;

  struct segmentation *seg = &cpi->common.seg;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segmentation map
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted macro-block segments
    for (i = 0; i < SEG_TREE_PROBS; i++) {
      const int prob = seg->tree_probs[i];
      const int update = prob != MAX_PROB;
      vp9_wb_write_bit(wb, update);
      if (update)
        vp9_wb_write_literal(wb, prob, 8);
    }

    // Write out the chosen coding method.
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++) {
        const int prob = seg->pred_probs[i];
        const int update = prob != MAX_PROB;
        vp9_wb_write_bit(wb, update);
        if (update)
          vp9_wb_write_literal(wb, prob, 8);
      }
    }
  }

  // Segmentation data
  vp9_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    vp9_wb_write_bit(wb, seg->abs_delta);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        const int active = vp9_segfeature_active(seg, i, j);
        vp9_wb_write_bit(wb, active);
        if (active) {
          const int data = vp9_get_segdata(seg, i, j);
          const int data_max = vp9_seg_feature_data_max(j);

          if (vp9_is_segfeature_signed(j)) {
            vp9_encode_unsigned_max(wb, abs(data), data_max);
            vp9_wb_write_bit(wb, data < 0);
          } else {
            vp9_encode_unsigned_max(wb, data, data_max);
          }
        }
      }
    }
  }
}


static void encode_txfm_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;

  // Mode
  vp9_write_literal(w, MIN(cm->tx_mode, ALLOW_32X32), 2);
  if (cm->tx_mode >= ALLOW_32X32)
    vp9_write_bit(w, cm->tx_mode == TX_MODE_SELECT);

  // Probabilities
  if (cm->tx_mode == TX_MODE_SELECT) {
    int i, j;
    unsigned int ct_8x8p[TX_SIZES - 3][2];
    unsigned int ct_16x16p[TX_SIZES - 2][2];
    unsigned int ct_32x32p[TX_SIZES - 1][2];


    for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
      tx_counts_to_branch_counts_8x8(cm->counts.tx.p8x8[i], ct_8x8p);
      for (j = 0; j < TX_SIZES - 3; j++)
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p8x8[i][j], ct_8x8p[j]);
    }

    for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
      tx_counts_to_branch_counts_16x16(cm->counts.tx.p16x16[i], ct_16x16p);
      for (j = 0; j < TX_SIZES - 2; j++)
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p16x16[i][j],
                                  ct_16x16p[j]);
    }

    for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
      tx_counts_to_branch_counts_32x32(cm->counts.tx.p32x32[i], ct_32x32p);
      for (j = 0; j < TX_SIZES - 1; j++)
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p32x32[i][j],
                                  ct_32x32p[j]);
    }
#ifdef MODE_STATS
    if (!cpi->dummy_packing)
      update_tx_count_stats(cm);
#endif
  }
}

static void write_interp_filter_type(INTERPOLATION_TYPE type,
                                     struct vp9_write_bit_buffer *wb) {
  const int type_to_literal[] = { 1, 0, 2, 3 };

  vp9_wb_write_bit(wb, type == SWITCHABLE);
  if (type != SWITCHABLE)
    vp9_wb_write_literal(wb, type_to_literal[type], 2);
}

static void fix_mcomp_filter_type(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;

  if (cm->mcomp_filter_type == SWITCHABLE) {
    // Check to see if only one of the filters is actually used
    int count[SWITCHABLE_FILTERS];
    int i, j, c = 0;
    for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
      count[i] = 0;
      for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
        count[i] += cm->counts.switchable_interp[j][i];
      c += (count[i] > 0);
    }
    if (c == 1) {
      // Only one filter is used. So set the filter at frame level
      for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
        if (count[i]) {
          cm->mcomp_filter_type = i;
          break;
        }
      }
    }
  }
}

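// Tile syntax: the column count is coded in unary as (log2_tile_cols -
// min_log2_tile_cols) one-bits, terminated by a zero unless the maximum for
// this frame width has been reached; the row count (log2_tile_rows of 0, 1
// or 2) takes one or two bits.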
static void write_tile_info(VP9_COMMON *cm, struct vp9_write_bit_buffer *wb) {
  int min_log2_tile_cols, max_log2_tile_cols, ones;
  vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  ones = cm->log2_tile_cols - min_log2_tile_cols;
  while (ones--)
    vp9_wb_write_bit(wb, 1);

  if (cm->log2_tile_cols < max_log2_tile_cols)
    vp9_wb_write_bit(wb, 0);

  // rows
  vp9_wb_write_bit(wb, cm->log2_tile_rows != 0);
  if (cm->log2_tile_rows != 0)
    vp9_wb_write_bit(wb, cm->log2_tile_rows != 1);
}

static int get_refresh_mask(VP9_COMP *cpi) {
  // Should the GF or ARF be updated using the transmitted frame or buffer
#if CONFIG_MULTIPLE_ARF
  if (!cpi->multi_arf_enabled && cpi->refresh_golden_frame &&
      !cpi->refresh_alt_ref_frame) {
#else
  if (cpi->refresh_golden_frame && !cpi->refresh_alt_ref_frame &&
      !cpi->use_svc) {
#endif
    // Preserve the previously existing golden frame and update the frame in
    // the alt ref slot instead. This is highly specific to the use of
    // alt-ref as a forward reference, and this needs to be generalized as
    // other uses are implemented (like RTC/temporal scaling)
    //
    // gld_fb_idx and alt_fb_idx need to be swapped for future frames, but
    // that happens in vp9_onyx_if.c:update_reference_frames() so that it can
    // be done outside of the recode loop.
    return (cpi->refresh_last_frame << cpi->lst_fb_idx) |
           (cpi->refresh_golden_frame << cpi->alt_fb_idx);
  } else {
    int arf_idx = cpi->alt_fb_idx;
#if CONFIG_MULTIPLE_ARF
    // Determine which ARF buffer to use to encode this ARF frame.
    if (cpi->multi_arf_enabled) {
      int sn = cpi->sequence_number;
      arf_idx = (cpi->frame_coding_order[sn] < 0) ?
                cpi->arf_buffer_idx[sn + 1] :
                cpi->arf_buffer_idx[sn];
    }
#endif
    return (cpi->refresh_last_frame << cpi->lst_fb_idx) |
           (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
           (cpi->refresh_alt_ref_frame << arf_idx);
  }
}

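// Packs all tile bitstreams into the frame. Token pointers are first laid
// out so each tile consumes its own slice of cpi->tok, then every tile is
// encoded with its own boolean coder; each tile except the last is preceded
// by its 4-byte big-endian size (see write_be32) so tiles can be parsed
// independently.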
michael@0 | 1169 | static size_t encode_tiles(VP9_COMP *cpi, uint8_t *data_ptr) { |
michael@0 | 1170 | VP9_COMMON *const cm = &cpi->common; |
michael@0 | 1171 | vp9_writer residual_bc; |
michael@0 | 1172 | |
michael@0 | 1173 | int tile_row, tile_col; |
michael@0 | 1174 | TOKENEXTRA *tok[4][1 << 6], *tok_end; |
michael@0 | 1175 | size_t total_size = 0; |
michael@0 | 1176 | const int tile_cols = 1 << cm->log2_tile_cols; |
michael@0 | 1177 | const int tile_rows = 1 << cm->log2_tile_rows; |
michael@0 | 1178 | |
michael@0 | 1179 | vpx_memset(cpi->above_seg_context, 0, sizeof(*cpi->above_seg_context) * |
michael@0 | 1180 | mi_cols_aligned_to_sb(cm->mi_cols)); |
michael@0 | 1181 | |
michael@0 | 1182 | tok[0][0] = cpi->tok; |
michael@0 | 1183 | for (tile_row = 0; tile_row < tile_rows; tile_row++) { |
michael@0 | 1184 | if (tile_row) |
michael@0 | 1185 | tok[tile_row][0] = tok[tile_row - 1][tile_cols - 1] + |
michael@0 | 1186 | cpi->tok_count[tile_row - 1][tile_cols - 1]; |
michael@0 | 1187 | |
michael@0 | 1188 | for (tile_col = 1; tile_col < tile_cols; tile_col++) |
michael@0 | 1189 | tok[tile_row][tile_col] = tok[tile_row][tile_col - 1] + |
michael@0 | 1190 | cpi->tok_count[tile_row][tile_col - 1]; |
michael@0 | 1191 | } |
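michael@0 | | |
michael@0 | | /* tok[r][c] now points at tile (r, c)'s slice of the single contiguous |
michael@0 | |  * cpi->tok token array, with slices laid out in tile raster order and |
michael@0 | |  * slice lengths given by cpi->tok_count[r][c]. */ |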
michael@0 | 1192 | |
michael@0 | 1193 | for (tile_row = 0; tile_row < tile_rows; tile_row++) { |
michael@0 | 1194 | for (tile_col = 0; tile_col < tile_cols; tile_col++) { |
michael@0 | 1195 | TileInfo tile; |
michael@0 | 1196 | |
michael@0 | 1197 | vp9_tile_init(&tile, cm, tile_row, tile_col); |
michael@0 | 1198 | tok_end = tok[tile_row][tile_col] + cpi->tok_count[tile_row][tile_col]; |
michael@0 | 1199 | |
michael@0 | 1200 | if (tile_col < tile_cols - 1 || tile_row < tile_rows - 1) |
michael@0 | 1201 | vp9_start_encode(&residual_bc, data_ptr + total_size + 4); |
michael@0 | 1202 | else |
michael@0 | 1203 | vp9_start_encode(&residual_bc, data_ptr + total_size); |
michael@0 | 1204 | |
michael@0 | 1205 | write_modes(cpi, &tile, &residual_bc, &tok[tile_row][tile_col], tok_end); |
michael@0 | 1206 | assert(tok[tile_row][tile_col] == tok_end); |
michael@0 | 1207 | vp9_stop_encode(&residual_bc); |
michael@0 | 1208 | if (tile_col < tile_cols - 1 || tile_row < tile_rows - 1) { |
michael@0 | 1209 | // size of this tile |
michael@0 | 1210 | write_be32(data_ptr + total_size, residual_bc.pos); |
michael@0 | 1211 | total_size += 4; |
michael@0 | 1212 | } |
michael@0 | 1213 | |
michael@0 | 1214 | total_size += residual_bc.pos; |
michael@0 | 1215 | } |
michael@0 | 1216 | } |
michael@0 | 1217 | |
michael@0 | 1218 | return total_size; |
michael@0 | 1219 | } |
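michael@0 | | |
michael@0 | | /* Tile data layout produced above, for tiles t0..tN in raster order; each |
michael@0 | |  * tile except the last is preceded by its 4-byte big-endian size, and the |
michael@0 | |  * last tile runs to the end of the frame: |
michael@0 | |  * |
michael@0 | |  *   [size(t0)][t0][size(t1)][t1] ... [size(tN-1)][tN-1][tN] |
michael@0 | |  */ |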
michael@0 | 1220 | |
michael@0 | 1221 | static void write_display_size(VP9_COMP *cpi, struct vp9_write_bit_buffer *wb) { |
michael@0 | 1222 | VP9_COMMON *const cm = &cpi->common; |
michael@0 | 1223 | |
michael@0 | 1224 | const int scaling_active = cm->width != cm->display_width || |
michael@0 | 1225 | cm->height != cm->display_height; |
michael@0 | 1226 | vp9_wb_write_bit(wb, scaling_active); |
michael@0 | 1227 | if (scaling_active) { |
michael@0 | 1228 | vp9_wb_write_literal(wb, cm->display_width - 1, 16); |
michael@0 | 1229 | vp9_wb_write_literal(wb, cm->display_height - 1, 16); |
michael@0 | 1230 | } |
michael@0 | 1231 | } |
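michael@0 | | |
michael@0 | | /* The display size is sent only when it differs from the coded size; the |
michael@0 | |  * application is expected to scale the decoded frame to it for display. */ |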
michael@0 | 1232 | |
michael@0 | 1233 | static void write_frame_size(VP9_COMP *cpi, |
michael@0 | 1234 | struct vp9_write_bit_buffer *wb) { |
michael@0 | 1235 | VP9_COMMON *const cm = &cpi->common; |
michael@0 | 1236 | vp9_wb_write_literal(wb, cm->width - 1, 16); |
michael@0 | 1237 | vp9_wb_write_literal(wb, cm->height - 1, 16); |
michael@0 | 1238 | |
michael@0 | 1239 | write_display_size(cpi, wb); |
michael@0 | 1240 | } |
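michael@0 | | |
michael@0 | | /* Dimensions are coded minus one in 16 bits: e.g. a 1920x1080 frame is |
michael@0 | |  * written as 1919 and 1079, giving a representable range of 1..65536. */ |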
michael@0 | 1241 | |
michael@0 | 1242 | static void write_frame_size_with_refs(VP9_COMP *cpi, |
michael@0 | 1243 | struct vp9_write_bit_buffer *wb) { |
michael@0 | 1244 | VP9_COMMON *const cm = &cpi->common; |
michael@0 | 1245 | int refs[ALLOWED_REFS_PER_FRAME] = {cpi->lst_fb_idx, cpi->gld_fb_idx, |
michael@0 | 1246 | cpi->alt_fb_idx}; |
michael@0 | 1247 | int i, found = 0; |
michael@0 | 1248 | |
michael@0 | 1249 | for (i = 0; i < ALLOWED_REFS_PER_FRAME; ++i) { |
michael@0 | 1250 | YV12_BUFFER_CONFIG *cfg = &cm->yv12_fb[cm->ref_frame_map[refs[i]]]; |
michael@0 | 1251 | found = cm->width == cfg->y_crop_width && |
michael@0 | 1252 | cm->height == cfg->y_crop_height; |
michael@0 | 1253 | |
michael@0 | 1254 | // TODO(ivan): This prevents a bug when more than 3 buffers are used. Do it |
michael@0 | 1255 | // in a better way. |
michael@0 | 1256 | if (cpi->use_svc) { |
michael@0 | 1257 | found = 0; |
michael@0 | 1258 | } |
michael@0 | 1259 | vp9_wb_write_bit(wb, found); |
michael@0 | 1260 | if (found) { |
michael@0 | 1261 | break; |
michael@0 | 1262 | } |
michael@0 | 1263 | } |
michael@0 | 1264 | |
michael@0 | 1265 | if (!found) { |
michael@0 | 1266 | vp9_wb_write_literal(wb, cm->width - 1, 16); |
michael@0 | 1267 | vp9_wb_write_literal(wb, cm->height - 1, 16); |
michael@0 | 1268 | } |
michael@0 | 1269 | |
michael@0 | 1270 | write_display_size(cpi, wb); |
michael@0 | 1271 | } |
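michael@0 | | |
michael@0 | | /* The loop above codes the size by reference: one flag bit per candidate |
michael@0 | |  * (last, golden, alt-ref, in that order). The first 1 bit means "same |
michael@0 | |  * crop size as that reference" and ends the list; only when all three |
michael@0 | |  * bits are 0 does an explicit size follow, in the same minus-one 16-bit |
michael@0 | |  * form as write_frame_size(). */ |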
michael@0 | 1272 | |
michael@0 | 1273 | static void write_sync_code(struct vp9_write_bit_buffer *wb) { |
michael@0 | 1274 | vp9_wb_write_literal(wb, VP9_SYNC_CODE_0, 8); |
michael@0 | 1275 | vp9_wb_write_literal(wb, VP9_SYNC_CODE_1, 8); |
michael@0 | 1276 | vp9_wb_write_literal(wb, VP9_SYNC_CODE_2, 8); |
michael@0 | 1277 | } |
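michael@0 | | |
michael@0 | | /* VP9_SYNC_CODE_0/1/2 are the fixed bytes 0x49, 0x83 and 0x42; a decoder |
michael@0 | |  * checks them before accepting a key or intra-only frame. */ |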
michael@0 | 1278 | |
michael@0 | 1279 | static void write_uncompressed_header(VP9_COMP *cpi, |
michael@0 | 1280 | struct vp9_write_bit_buffer *wb) { |
michael@0 | 1281 | VP9_COMMON *const cm = &cpi->common; |
michael@0 | 1282 | |
michael@0 | 1283 | vp9_wb_write_literal(wb, VP9_FRAME_MARKER, 2); |
michael@0 | 1284 | |
michael@0 | 1285 | // bitstream version. |
michael@0 | 1286 | // 00 - profile 0. 4:2:0 only |
michael@0 | 1287 | // 10 - profile 1. adds 4:4:4, 4:2:2, alpha |
michael@0 | 1288 | vp9_wb_write_bit(wb, cm->version); |
michael@0 | 1289 | vp9_wb_write_bit(wb, 0); |
michael@0 | 1290 | |
michael@0 | 1291 | vp9_wb_write_bit(wb, 0); // show_existing_frame |
michael@0 | 1292 | vp9_wb_write_bit(wb, cm->frame_type); |
michael@0 | 1293 | vp9_wb_write_bit(wb, cm->show_frame); |
michael@0 | 1294 | vp9_wb_write_bit(wb, cm->error_resilient_mode); |
michael@0 | 1295 | |
michael@0 | 1296 | if (cm->frame_type == KEY_FRAME) { |
michael@0 | 1297 | const COLOR_SPACE cs = UNKNOWN; // always coded as UNKNOWN for now |
michael@0 | 1298 | write_sync_code(wb); |
michael@0 | 1299 | vp9_wb_write_literal(wb, cs, 3); |
michael@0 | 1300 | if (cs != SRGB) { |
michael@0 | 1301 | vp9_wb_write_bit(wb, 0); // 0: [16, 235] (i.e. xvYCC), 1: [0, 255] |
michael@0 | 1302 | if (cm->version == 1) { |
michael@0 | 1303 | vp9_wb_write_bit(wb, cm->subsampling_x); |
michael@0 | 1304 | vp9_wb_write_bit(wb, cm->subsampling_y); |
michael@0 | 1305 | vp9_wb_write_bit(wb, 0); // has extra plane |
michael@0 | 1306 | } |
michael@0 | 1307 | } else { |
michael@0 | 1308 | assert(cm->version == 1); |
michael@0 | 1309 | vp9_wb_write_bit(wb, 0); // has extra plane |
michael@0 | 1310 | } |
michael@0 | 1311 | |
michael@0 | 1312 | write_frame_size(cpi, wb); |
michael@0 | 1313 | } else { |
michael@0 | 1314 | const int refs[ALLOWED_REFS_PER_FRAME] = {cpi->lst_fb_idx, cpi->gld_fb_idx, |
michael@0 | 1315 | cpi->alt_fb_idx}; |
michael@0 | 1316 | if (!cm->show_frame) |
michael@0 | 1317 | vp9_wb_write_bit(wb, cm->intra_only); |
michael@0 | 1318 | |
michael@0 | 1319 | if (!cm->error_resilient_mode) |
michael@0 | 1320 | vp9_wb_write_literal(wb, cm->reset_frame_context, 2); |
michael@0 | 1321 | |
michael@0 | 1322 | if (cm->intra_only) { |
michael@0 | 1323 | write_sync_code(wb); |
michael@0 | 1324 | |
michael@0 | 1325 | vp9_wb_write_literal(wb, get_refresh_mask(cpi), NUM_REF_FRAMES); |
michael@0 | 1326 | write_frame_size(cpi, wb); |
michael@0 | 1327 | } else { |
michael@0 | 1328 | int i; |
michael@0 | 1329 | vp9_wb_write_literal(wb, get_refresh_mask(cpi), NUM_REF_FRAMES); |
michael@0 | 1330 | for (i = 0; i < ALLOWED_REFS_PER_FRAME; ++i) { |
michael@0 | 1331 | vp9_wb_write_literal(wb, refs[i], NUM_REF_FRAMES_LOG2); |
michael@0 | 1332 | vp9_wb_write_bit(wb, cm->ref_frame_sign_bias[LAST_FRAME + i]); |
michael@0 | 1333 | } |
michael@0 | 1334 | |
michael@0 | 1335 | write_frame_size_with_refs(cpi, wb); |
michael@0 | 1336 | |
michael@0 | 1337 | vp9_wb_write_bit(wb, cm->allow_high_precision_mv); |
michael@0 | 1338 | |
michael@0 | 1339 | fix_mcomp_filter_type(cpi); |
michael@0 | 1340 | write_interp_filter_type(cm->mcomp_filter_type, wb); |
michael@0 | 1341 | } |
michael@0 | 1342 | } |
michael@0 | 1343 | |
michael@0 | 1344 | if (!cm->error_resilient_mode) { |
michael@0 | 1345 | vp9_wb_write_bit(wb, cm->refresh_frame_context); |
michael@0 | 1346 | vp9_wb_write_bit(wb, cm->frame_parallel_decoding_mode); |
michael@0 | 1347 | } |
michael@0 | 1348 | |
michael@0 | 1349 | vp9_wb_write_literal(wb, cm->frame_context_idx, NUM_FRAME_CONTEXTS_LOG2); |
michael@0 | 1350 | |
michael@0 | 1351 | encode_loopfilter(&cm->lf, wb); |
michael@0 | 1352 | encode_quantization(cm, wb); |
michael@0 | 1353 | encode_segmentation(cpi, wb); |
michael@0 | 1354 | |
michael@0 | 1355 | write_tile_info(cm, wb); |
michael@0 | 1356 | } |
michael@0 | 1357 | |
michael@0 | 1358 | static size_t write_compressed_header(VP9_COMP *cpi, uint8_t *data) { |
michael@0 | 1359 | VP9_COMMON *const cm = &cpi->common; |
michael@0 | 1360 | MACROBLOCKD *const xd = &cpi->mb.e_mbd; |
michael@0 | 1361 | FRAME_CONTEXT *const fc = &cm->fc; |
michael@0 | 1362 | vp9_writer header_bc; |
michael@0 | 1363 | |
michael@0 | 1364 | vp9_start_encode(&header_bc, data); |
michael@0 | 1365 | |
michael@0 | 1366 | if (xd->lossless) |
michael@0 | 1367 | cm->tx_mode = ONLY_4X4; |
michael@0 | 1368 | else |
michael@0 | 1369 | encode_txfm_probs(cpi, &header_bc); |
michael@0 | 1370 | |
michael@0 | 1371 | update_coef_probs(cpi, &header_bc); |
michael@0 | 1372 | |
michael@0 | 1373 | #ifdef ENTROPY_STATS |
michael@0 | 1374 | active_section = 2; |
michael@0 | 1375 | #endif |
michael@0 | 1376 | |
michael@0 | 1377 | vp9_update_skip_probs(cpi, &header_bc); |
michael@0 | 1378 | |
michael@0 | 1379 | if (!frame_is_intra_only(cm)) { |
michael@0 | 1380 | int i; |
michael@0 | 1381 | #ifdef ENTROPY_STATS |
michael@0 | 1382 | active_section = 1; |
michael@0 | 1383 | #endif |
michael@0 | 1384 | |
michael@0 | 1385 | update_inter_mode_probs(cm, &header_bc); |
michael@0 | 1386 | vp9_zero(cm->counts.inter_mode); |
michael@0 | 1387 | |
michael@0 | 1388 | if (cm->mcomp_filter_type == SWITCHABLE) |
michael@0 | 1389 | update_switchable_interp_probs(cpi, &header_bc); |
michael@0 | 1390 | |
michael@0 | 1391 | for (i = 0; i < INTRA_INTER_CONTEXTS; i++) |
michael@0 | 1392 | vp9_cond_prob_diff_update(&header_bc, &fc->intra_inter_prob[i], |
michael@0 | 1393 | cpi->intra_inter_count[i]); |
michael@0 | 1394 | |
michael@0 | 1395 | if (cm->allow_comp_inter_inter) { |
michael@0 | 1396 | const int comp_pred_mode = cpi->common.comp_pred_mode; |
michael@0 | 1397 | const int use_compound_pred = comp_pred_mode != SINGLE_PREDICTION_ONLY; |
michael@0 | 1398 | const int use_hybrid_pred = comp_pred_mode == HYBRID_PREDICTION; |
michael@0 | 1399 | |
michael@0 | 1400 | vp9_write_bit(&header_bc, use_compound_pred); |
michael@0 | 1401 | if (use_compound_pred) { |
michael@0 | 1402 | vp9_write_bit(&header_bc, use_hybrid_pred); |
michael@0 | 1403 | if (use_hybrid_pred) |
michael@0 | 1404 | for (i = 0; i < COMP_INTER_CONTEXTS; i++) |
michael@0 | 1405 | vp9_cond_prob_diff_update(&header_bc, &fc->comp_inter_prob[i], |
michael@0 | 1406 | cpi->comp_inter_count[i]); |
michael@0 | 1407 | } |
michael@0 | 1408 | } |
michael@0 | 1409 | |
michael@0 | 1410 | if (cm->comp_pred_mode != COMP_PREDICTION_ONLY) { |
michael@0 | 1411 | for (i = 0; i < REF_CONTEXTS; i++) { |
michael@0 | 1412 | vp9_cond_prob_diff_update(&header_bc, &fc->single_ref_prob[i][0], |
michael@0 | 1413 | cpi->single_ref_count[i][0]); |
michael@0 | 1414 | vp9_cond_prob_diff_update(&header_bc, &fc->single_ref_prob[i][1], |
michael@0 | 1415 | cpi->single_ref_count[i][1]); |
michael@0 | 1416 | } |
michael@0 | 1417 | } |
michael@0 | 1418 | |
michael@0 | 1419 | if (cm->comp_pred_mode != SINGLE_PREDICTION_ONLY) |
michael@0 | 1420 | for (i = 0; i < REF_CONTEXTS; i++) |
michael@0 | 1421 | vp9_cond_prob_diff_update(&header_bc, &fc->comp_ref_prob[i], |
michael@0 | 1422 | cpi->comp_ref_count[i]); |
michael@0 | 1423 | |
michael@0 | 1424 | update_mbintra_mode_probs(cpi, &header_bc); |
michael@0 | 1425 | |
michael@0 | 1426 | for (i = 0; i < PARTITION_CONTEXTS; ++i) { |
michael@0 | 1427 | unsigned int bct[PARTITION_TYPES - 1][2]; |
michael@0 | 1428 | update_mode(&header_bc, PARTITION_TYPES, vp9_partition_tree, |
michael@0 | 1429 | fc->partition_prob[i], bct, |
michael@0 | 1430 | (unsigned int *)cpi->partition_count[i]); |
michael@0 | 1431 | } |
michael@0 | 1432 | |
michael@0 | 1433 | vp9_write_nmv_probs(cpi, cm->allow_high_precision_mv, &header_bc); |
michael@0 | 1434 | } |
michael@0 | 1435 | |
michael@0 | 1436 | vp9_stop_encode(&header_bc); |
michael@0 | 1437 | assert(header_bc.pos <= 0xffff); // must fit the 16-bit first-part size field |
michael@0 | 1438 | |
michael@0 | 1439 | return header_bc.pos; |
michael@0 | 1440 | } |
michael@0 | 1441 | |
michael@0 | 1442 | void vp9_pack_bitstream(VP9_COMP *cpi, uint8_t *dest, unsigned long *size) { |
michael@0 | 1443 | uint8_t *data = dest; |
michael@0 | 1444 | size_t first_part_size; |
michael@0 | 1445 | struct vp9_write_bit_buffer wb = {data, 0}; |
michael@0 | 1446 | struct vp9_write_bit_buffer saved_wb; |
michael@0 | 1447 | |
michael@0 | 1448 | write_uncompressed_header(cpi, &wb); |
michael@0 | 1449 | saved_wb = wb; |
michael@0 | 1450 | vp9_wb_write_literal(&wb, 0, 16); // first-part size not known yet; patched below |
michael@0 | 1451 | |
michael@0 | 1452 | data += vp9_rb_bytes_written(&wb); |
michael@0 | 1453 | |
michael@0 | 1454 | vp9_compute_update_table(); |
michael@0 | 1455 | |
michael@0 | 1456 | #ifdef ENTROPY_STATS |
michael@0 | 1457 | if (cpi->common.frame_type == INTER_FRAME) |
michael@0 | 1458 | active_section = 0; |
michael@0 | 1459 | else |
michael@0 | 1460 | active_section = 7; |
michael@0 | 1461 | #endif |
michael@0 | 1462 | |
michael@0 | 1463 | vp9_clear_system_state(); // reset x87 FPU/MMX state (x86: __asm emms) |
michael@0 | 1464 | |
michael@0 | 1465 | first_part_size = write_compressed_header(cpi, data); |
michael@0 | 1466 | data += first_part_size; |
michael@0 | 1467 | vp9_wb_write_literal(&saved_wb, first_part_size, 16); |
michael@0 | 1468 | |
michael@0 | 1469 | data += encode_tiles(cpi, data); |
michael@0 | 1470 | |
michael@0 | 1471 | *size = data - dest; |
michael@0 | 1472 | } |
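michael@0 | | |
michael@0 | | /* Overall frame layout emitted above: |
michael@0 | |  * |
michael@0 | |  *   [uncompressed header][first_part_size : 16][compressed header][tiles] |
michael@0 | |  * |
michael@0 | |  * The 16-bit size is written as a zero placeholder, then patched through |
michael@0 | |  * saved_wb once write_compressed_header() returns the real length, which |
michael@0 | |  * its assert guarantees fits in 16 bits. */ |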
michael@0 | 1473 | |
michael@0 | 1474 | #ifdef ENTROPY_STATS |
michael@0 | 1475 | static void print_tree_update_for_type(FILE *f, |
michael@0 | 1476 | vp9_coeff_stats *tree_update_hist, |
michael@0 | 1477 | int block_types, const char *header) { |
michael@0 | 1478 | int i, j, k, l, m; |
michael@0 | 1479 | |
michael@0 | 1480 | fprintf(f, "const vp9_coeff_prob %s = {\n", header); |
michael@0 | 1481 | for (i = 0; i < block_types; i++) { |
michael@0 | 1482 | fprintf(f, "  {\n"); |
michael@0 | 1483 | for (j = 0; j < REF_TYPES; j++) { |
michael@0 | 1484 | fprintf(f, "    {\n"); |
michael@0 | 1485 | for (k = 0; k < COEF_BANDS; k++) { |
michael@0 | 1486 | fprintf(f, "      {\n"); |
michael@0 | 1487 | for (l = 0; l < PREV_COEF_CONTEXTS; l++) { |
michael@0 | 1488 | fprintf(f, "        {"); |
michael@0 | 1489 | for (m = 0; m < ENTROPY_NODES; m++) { |
michael@0 | 1490 | fprintf(f, "%3d, ", |
michael@0 | 1491 | get_binary_prob(tree_update_hist[i][j][k][l][m][0], |
michael@0 | 1492 | tree_update_hist[i][j][k][l][m][1])); |
michael@0 | 1493 | } |
michael@0 | 1494 | fprintf(f, "},\n"); |
michael@0 | 1495 | } |
michael@0 | 1496 | fprintf(f, "      },\n"); |
michael@0 | 1497 | } |
michael@0 | 1498 | fprintf(f, "    },\n"); |
michael@0 | 1499 | } |
michael@0 | 1500 | fprintf(f, "  },\n"); |
michael@0 | 1501 | } |
michael@0 | 1502 | fprintf(f, "};\n"); |
michael@0 | 1503 | } |
michael@0 | 1504 | |
michael@0 | 1505 | void print_tree_update_probs(void) { |
michael@0 | 1506 | FILE *f = fopen("coefupdprob.h", "w"); |
michael@0 | | if (!f) return; // stats dump is best-effort; skip if the file cannot be opened |
michael@0 | 1507 | fprintf(f, "\n/* Update probabilities for token entropy tree. */\n\n"); |
michael@0 | 1508 | |
michael@0 | 1509 | print_tree_update_for_type(f, tree_update_hist[TX_4X4], BLOCK_TYPES, |
michael@0 | 1510 | "vp9_coef_update_probs_4x4[BLOCK_TYPES]"); |
michael@0 | 1511 | print_tree_update_for_type(f, tree_update_hist[TX_8X8], BLOCK_TYPES, |
michael@0 | 1512 | "vp9_coef_update_probs_8x8[BLOCK_TYPES]"); |
michael@0 | 1513 | print_tree_update_for_type(f, tree_update_hist[TX_16X16], BLOCK_TYPES, |
michael@0 | 1514 | "vp9_coef_update_probs_16x16[BLOCK_TYPES]"); |
michael@0 | 1515 | print_tree_update_for_type(f, tree_update_hist[TX_32X32], BLOCK_TYPES, |
michael@0 | 1516 | "vp9_coef_update_probs_32x32[BLOCK_TYPES]"); |
michael@0 | 1517 | |
michael@0 | 1518 | fclose(f); |
michael@0 | 1519 | f = fopen("treeupdate.bin", "wb"); |
michael@0 | 1520 | fwrite(tree_update_hist, sizeof(tree_update_hist), 1, f); |
michael@0 | 1521 | fclose(f); |
michael@0 | 1522 | } |
michael@0 | 1523 | #endif |