/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include "vp8/common/header.h"
#include "encodemv.h"
#include "vp8/common/entropymode.h"
#include "vp8/common/findnearmv.h"
#include "mcomp.h"
#include "vp8/common/systemdependent.h"
#include <assert.h>
#include <stdio.h>
#include <limits.h>
#include "vp8/common/pragmas.h"
#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"
#include "bitstream.h"

#include "defaultcoefcounts.h"
#include "vp8/common/common.h"

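/* Default estimate of the probability that a macroblock's "skip" flag is
 * false, one entry per quantizer index (0..127); higher quantizers make skips
 * more likely, so the probability falls.  Note (added comment): this table is
 * only a starting point; the encoder recomputes prob_skip_false from the
 * actual per-frame skip counts before writing each frame (see
 * pack_inter_mode_mvs() and write_kfmodes() below).
 */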
const int vp8cx_base_skip_false_prob[128] =
{
    255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255,
    251, 248, 244, 240, 236, 232, 229, 225,
    221, 217, 213, 208, 204, 199, 194, 190,
    187, 183, 179, 175, 172, 168, 164, 160,
    157, 153, 149, 145, 142, 138, 134, 130,
    127, 124, 120, 117, 114, 110, 107, 104,
    101,  98,  95,  92,  89,  86,  83,  80,
     77,  74,  71,  68,  65,  62,  59,  56,
     53,  50,  47,  44,  41,  38,  35,  32,
     30,  28,  26,  24,  22,  20,  18,  16,
};

#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef VP8_ENTROPY_STATS
int intra_mode_stats[10][10][10];
static unsigned int tree_update_hist [BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [ENTROPY_NODES] [2];
extern unsigned int active_section;
#endif

#ifdef MODE_STATS
int count_mb_seg[4] = { 0, 0, 0, 0 };
#endif


static void update_mode(
    vp8_writer *const w,
    int n,
    vp8_token tok [/* n */],
    vp8_tree tree,
    vp8_prob Pnew [/* n-1 */],
    vp8_prob Pcur [/* n-1 */],
    unsigned int bct [/* n-1 */] [2],
    const unsigned int num_events[/* n */]
)
{
    unsigned int new_b = 0, old_b = 0;
    int i = 0;

    vp8_tree_probs_from_distribution(
        n--, tok, tree,
        Pnew, bct, num_events,
        256, 1
    );

    do
    {
        new_b += vp8_cost_branch(bct[i], Pnew[i]);
        old_b += vp8_cost_branch(bct[i], Pcur[i]);
    }
    while (++i < n);

    if (new_b + (n << 8) < old_b)
    {
        int j = 0;

        vp8_write_bit(w, 1);

        do
        {
            const vp8_prob p = Pnew[j];

            vp8_write_literal(w, Pcur[j] = p ? p : 1, 8);
        }
        while (++j < n);
    }
    else
        vp8_write_bit(w, 0);
}
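
/* update_mode(): derive new tree probabilities for a mode set from the counts
 * gathered this frame, then either signal "update" and write the new 8-bit
 * probabilities, or signal "no update", depending on whether the estimated
 * cost of coding the modes with the new probabilities (plus an update
 * overhead that grows with the number of probabilities) beats the cost with
 * the currently stored ones.
 */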

static void update_mbintra_mode_probs(VP8_COMP *cpi)
{
    VP8_COMMON *const x = & cpi->common;

    vp8_writer *const w = cpi->bc;

    {
        vp8_prob Pnew [VP8_YMODES-1];
        unsigned int bct [VP8_YMODES-1] [2];

        update_mode(
            w, VP8_YMODES, vp8_ymode_encodings, vp8_ymode_tree,
            Pnew, x->fc.ymode_prob, bct, (unsigned int *)cpi->mb.ymode_count
        );
    }
    {
        vp8_prob Pnew [VP8_UV_MODES-1];
        unsigned int bct [VP8_UV_MODES-1] [2];

        update_mode(
            w, VP8_UV_MODES, vp8_uv_mode_encodings, vp8_uv_mode_tree,
            Pnew, x->fc.uv_mode_prob, bct, (unsigned int *)cpi->mb.uv_mode_count
        );
    }
}

static void write_ymode(vp8_writer *bc, int m, const vp8_prob *p)
{
    vp8_write_token(bc, vp8_ymode_tree, p, vp8_ymode_encodings + m);
}

static void kfwrite_ymode(vp8_writer *bc, int m, const vp8_prob *p)
{
    vp8_write_token(bc, vp8_kf_ymode_tree, p, vp8_kf_ymode_encodings + m);
}

static void write_uv_mode(vp8_writer *bc, int m, const vp8_prob *p)
{
    vp8_write_token(bc, vp8_uv_mode_tree, p, vp8_uv_mode_encodings + m);
}


static void write_bmode(vp8_writer *bc, int m, const vp8_prob *p)
{
    vp8_write_token(bc, vp8_bmode_tree, p, vp8_bmode_encodings + m);
}

static void write_split(vp8_writer *bc, int x)
{
    vp8_write_token(
        bc, vp8_mbsplit_tree, vp8_mbsplit_probs, vp8_mbsplit_encodings + x
    );
}

void vp8_pack_tokens_c(vp8_writer *w, const TOKENEXTRA *p, int xcount)
{
    const TOKENEXTRA *stop = p + xcount;
    unsigned int split;
    unsigned int shift;
    int count = w->count;
    unsigned int range = w->range;
    unsigned int lowvalue = w->lowvalue;

    while (p < stop)
    {
        const int t = p->Token;
        vp8_token *a = vp8_coef_encodings + t;
        const vp8_extra_bit_struct *b = vp8_extra_bits + t;
        int i = 0;
        const unsigned char *pp = p->context_tree;
        int v = a->value;
        int n = a->Len;

        if (p->skip_eob_node)
        {
            n--;
            i = 2;
        }

        do
        {
            const int bb = (v >> --n) & 1;
            split = 1 + (((range - 1) * pp[i>>1]) >> 8);
            i = vp8_coef_tree[i+bb];

            if (bb)
            {
                lowvalue += split;
                range = range - split;
            }
            else
            {
                range = split;
            }

            shift = vp8_norm[range];
            range <<= shift;
            count += shift;

            if (count >= 0)
            {
                int offset = shift - count;

                if ((lowvalue << (offset - 1)) & 0x80000000)
                {
                    int x = w->pos - 1;

                    while (x >= 0 && w->buffer[x] == 0xff)
                    {
                        w->buffer[x] = (unsigned char)0;
                        x--;
                    }

                    w->buffer[x] += 1;
                }

                validate_buffer(w->buffer + w->pos,
                                1,
                                w->buffer_end,
                                w->error);

                w->buffer[w->pos++] = (lowvalue >> (24 - offset));
                lowvalue <<= offset;
                shift = count;
                lowvalue &= 0xffffff;
                count -= 8;
            }

            lowvalue <<= shift;
        }
        while (n);


        if (b->base_val)
        {
            const int e = p->Extra, L = b->Len;

            if (L)
            {
                const unsigned char *proba = b->prob;
                const int v2 = e >> 1;
                int n2 = L; /* number of bits in v2, assumed nonzero */
                i = 0;

                do
                {
                    const int bb = (v2 >> --n2) & 1;
                    split = 1 + (((range - 1) * proba[i>>1]) >> 8);
                    i = b->tree[i+bb];

                    if (bb)
                    {
                        lowvalue += split;
                        range = range - split;
                    }
                    else
                    {
                        range = split;
                    }

                    shift = vp8_norm[range];
                    range <<= shift;
                    count += shift;

                    if (count >= 0)
                    {
                        int offset = shift - count;

                        if ((lowvalue << (offset - 1)) & 0x80000000)
                        {
                            int x = w->pos - 1;

                            while (x >= 0 && w->buffer[x] == 0xff)
                            {
                                w->buffer[x] = (unsigned char)0;
                                x--;
                            }

                            w->buffer[x] += 1;
                        }

                        validate_buffer(w->buffer + w->pos,
                                        1,
                                        w->buffer_end,
                                        w->error);

                        w->buffer[w->pos++] = (lowvalue >> (24 - offset));
                        lowvalue <<= offset;
                        shift = count;
                        lowvalue &= 0xffffff;
                        count -= 8;
                    }

                    lowvalue <<= shift;
                }
                while (n2);
            }


            {

                split = (range + 1) >> 1;

                if (e & 1)
                {
                    lowvalue += split;
                    range = range - split;
                }
                else
                {
                    range = split;
                }

                range <<= 1;

                if ((lowvalue & 0x80000000))
                {
                    int x = w->pos - 1;

                    while (x >= 0 && w->buffer[x] == 0xff)
                    {
                        w->buffer[x] = (unsigned char)0;
                        x--;
                    }

                    w->buffer[x] += 1;

                }

                lowvalue <<= 1;

                if (!++count)
                {
                    count = -8;

                    validate_buffer(w->buffer + w->pos,
                                    1,
                                    w->buffer_end,
                                    w->error);

                    w->buffer[w->pos++] = (lowvalue >> 24);
                    lowvalue &= 0xffffff;
                }
            }

        }

        ++p;
    }

    w->count = count;
    w->lowvalue = lowvalue;
    w->range = range;

}
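
/* Note on the inlined boolean coder above: each bit is coded by splitting the
 * current [lowvalue, lowvalue + range) interval in proportion to the node
 * probability (the "split" computation), renormalising range with the
 * vp8_norm[] table, and emitting a byte whenever 8 new bits become available.
 * The inner while-loop that walks back over 0xff bytes propagates a carry out
 * of "lowvalue" into bytes that have already been written to the buffer.
 */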

static void write_partition_size(unsigned char *cx_data, int size)
{
    signed char csize;

    csize = size & 0xff;
    *cx_data = csize;
    csize = (size >> 8) & 0xff;
    *(cx_data + 1) = csize;
    csize = (size >> 16) & 0xff;
    *(cx_data + 2) = csize;

}
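
/* Illustrative sketch (not part of the encoder): the three bytes written by
 * write_partition_size() form a little-endian 24-bit value, so a consumer of
 * the partition size table could recover each size as below.  The helper name
 * is hypothetical.
 */
#if 0
static int read_partition_size_example(const unsigned char *sz)
{
    return sz[0] | (sz[1] << 8) | (sz[2] << 16);
}
#endif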

static void pack_tokens_into_partitions_c(VP8_COMP *cpi, unsigned char *cx_data,
                                          unsigned char * cx_data_end,
                                          int num_part)
{

    int i;
    unsigned char *ptr = cx_data;
    unsigned char *ptr_end = cx_data_end;
    vp8_writer * w;

    for (i = 0; i < num_part; i++)
    {
        int mb_row;

        w = cpi->bc + i + 1;

        vp8_start_encode(w, ptr, ptr_end);

        for (mb_row = i; mb_row < cpi->common.mb_rows; mb_row += num_part)
        {
            const TOKENEXTRA *p = cpi->tplist[mb_row].start;
            const TOKENEXTRA *stop = cpi->tplist[mb_row].stop;
            int tokens = (int)(stop - p);

            vp8_pack_tokens_c(w, p, tokens);
        }

        vp8_stop_encode(w);
        ptr += w->pos;
    }
}


static void pack_mb_row_tokens_c(VP8_COMP *cpi, vp8_writer *w)
{
    int mb_row;

    for (mb_row = 0; mb_row < cpi->common.mb_rows; mb_row++)
    {
        const TOKENEXTRA *p = cpi->tplist[mb_row].start;
        const TOKENEXTRA *stop = cpi->tplist[mb_row].stop;
        int tokens = (int)(stop - p);

        vp8_pack_tokens_c(w, p, tokens);
    }

}

static void write_mv_ref
(
    vp8_writer *w, MB_PREDICTION_MODE m, const vp8_prob *p
)
{
#if CONFIG_DEBUG
    assert(NEARESTMV <= m && m <= SPLITMV);
#endif
    vp8_write_token(w, vp8_mv_ref_tree, p,
                    vp8_mv_ref_encoding_array + (m - NEARESTMV));
}

static void write_sub_mv_ref
(
    vp8_writer *w, B_PREDICTION_MODE m, const vp8_prob *p
)
{
#if CONFIG_DEBUG
    assert(LEFT4X4 <= m && m <= NEW4X4);
#endif
    vp8_write_token(w, vp8_sub_mv_ref_tree, p,
                    vp8_sub_mv_ref_encoding_array + (m - LEFT4X4));
}

static void write_mv
(
    vp8_writer *w, const MV *mv, const int_mv *ref, const MV_CONTEXT *mvc
)
{
    MV e;
    e.row = mv->row - ref->as_mv.row;
    e.col = mv->col - ref->as_mv.col;

    vp8_encode_motion_vector(w, &e, mvc);
}

static void write_mb_features(vp8_writer *w, const MB_MODE_INFO *mi, const MACROBLOCKD *x)
{
    /* Encode the MB segment id. */
    if (x->segmentation_enabled && x->update_mb_segmentation_map)
    {
        switch (mi->segment_id)
        {
        case 0:
            vp8_write(w, 0, x->mb_segment_tree_probs[0]);
            vp8_write(w, 0, x->mb_segment_tree_probs[1]);
            break;
        case 1:
            vp8_write(w, 0, x->mb_segment_tree_probs[0]);
            vp8_write(w, 1, x->mb_segment_tree_probs[1]);
            break;
        case 2:
            vp8_write(w, 1, x->mb_segment_tree_probs[0]);
            vp8_write(w, 0, x->mb_segment_tree_probs[2]);
            break;
        case 3:
            vp8_write(w, 1, x->mb_segment_tree_probs[0]);
            vp8_write(w, 1, x->mb_segment_tree_probs[2]);
            break;

        /* TRAP.. This should not happen */
        default:
            vp8_write(w, 0, x->mb_segment_tree_probs[0]);
            vp8_write(w, 0, x->mb_segment_tree_probs[1]);
            break;
        }
    }
}
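
/* The segment id is coded with a two-level binary tree: the first bit
 * (probs[0]) chooses between segments {0,1} and {2,3}, and the second bit
 * (probs[1] or probs[2]) selects the segment within the chosen pair, matching
 * the switch statement above.
 */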
void vp8_convert_rfct_to_prob(VP8_COMP *const cpi)
{
    const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
    const int rf_intra = rfct[INTRA_FRAME];
    const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];

    /* Calculate the probabilities used to code the ref frame based on usage */
    if (!(cpi->prob_intra_coded = rf_intra * 255 / (rf_intra + rf_inter)))
        cpi->prob_intra_coded = 1;

    cpi->prob_last_coded = rf_inter ? (rfct[LAST_FRAME] * 255) / rf_inter : 128;

    if (!cpi->prob_last_coded)
        cpi->prob_last_coded = 1;

    cpi->prob_gf_coded = (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME])
                         ? (rfct[GOLDEN_FRAME] * 255) / (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]) : 128;

    if (!cpi->prob_gf_coded)
        cpi->prob_gf_coded = 1;

}
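
/* Worked example (illustrative numbers only): with per-frame reference counts
 * of intra=20, last=60, golden=15 and altref=5, rf_inter is 80, so
 * prob_intra_coded = 20*255/100 = 51, prob_last_coded = 60*255/80 = 191 and
 * prob_gf_coded = 15*255/20 = 191.  A zero result is clamped to 1 so that a
 * zero probability is never signalled to the boolean coder.
 */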

static void pack_inter_mode_mvs(VP8_COMP *const cpi)
{
    VP8_COMMON *const pc = & cpi->common;
    vp8_writer *const w = cpi->bc;
    const MV_CONTEXT *mvc = pc->fc.mvc;


    MODE_INFO *m = pc->mi;
    const int mis = pc->mode_info_stride;
    int mb_row = -1;

    int prob_skip_false = 0;

    cpi->mb.partition_info = cpi->mb.pi;

    vp8_convert_rfct_to_prob(cpi);

#ifdef VP8_ENTROPY_STATS
    active_section = 1;
#endif

    if (pc->mb_no_coeff_skip)
    {
        int total_mbs = pc->mb_rows * pc->mb_cols;

        prob_skip_false = (total_mbs - cpi->mb.skip_true_count) * 256 / total_mbs;

        if (prob_skip_false <= 1)
            prob_skip_false = 1;

        if (prob_skip_false > 255)
            prob_skip_false = 255;

        cpi->prob_skip_false = prob_skip_false;
        vp8_write_literal(w, prob_skip_false, 8);
    }

    vp8_write_literal(w, cpi->prob_intra_coded, 8);
    vp8_write_literal(w, cpi->prob_last_coded, 8);
    vp8_write_literal(w, cpi->prob_gf_coded, 8);

    update_mbintra_mode_probs(cpi);

    vp8_write_mvprobs(cpi);

    while (++mb_row < pc->mb_rows)
    {
        int mb_col = -1;

        while (++mb_col < pc->mb_cols)
        {
            const MB_MODE_INFO *const mi = & m->mbmi;
            const MV_REFERENCE_FRAME rf = mi->ref_frame;
            const MB_PREDICTION_MODE mode = mi->mode;

            MACROBLOCKD *xd = &cpi->mb.e_mbd;

            /* Distance of the MB from the various image edges.
             * These are specified to 1/8th pel as they are always compared
             * to MV values that are in 1/8th pel units.
             */
            xd->mb_to_left_edge = -((mb_col * 16) << 3);
            xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;
            xd->mb_to_top_edge = -((mb_row * 16) << 3);
            xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;

#ifdef VP8_ENTROPY_STATS
            active_section = 9;
#endif

            if (cpi->mb.e_mbd.update_mb_segmentation_map)
                write_mb_features(w, mi, &cpi->mb.e_mbd);

            if (pc->mb_no_coeff_skip)
                vp8_encode_bool(w, m->mbmi.mb_skip_coeff, prob_skip_false);

            if (rf == INTRA_FRAME)
            {
                vp8_write(w, 0, cpi->prob_intra_coded);
#ifdef VP8_ENTROPY_STATS
                active_section = 6;
#endif
                write_ymode(w, mode, pc->fc.ymode_prob);

                if (mode == B_PRED)
                {
                    int j = 0;

                    do
                        write_bmode(w, m->bmi[j].as_mode, pc->fc.bmode_prob);
                    while (++j < 16);
                }

                write_uv_mode(w, mi->uv_mode, pc->fc.uv_mode_prob);
            }
            else    /* inter coded */
            {
                int_mv best_mv;
                vp8_prob mv_ref_p [VP8_MVREFS-1];

                vp8_write(w, 1, cpi->prob_intra_coded);

                if (rf == LAST_FRAME)
                    vp8_write(w, 0, cpi->prob_last_coded);
                else
                {
                    vp8_write(w, 1, cpi->prob_last_coded);
                    vp8_write(w, (rf == GOLDEN_FRAME) ? 0 : 1, cpi->prob_gf_coded);
                }

                {
                    int_mv n1, n2;
                    int ct[4];

                    vp8_find_near_mvs(xd, m, &n1, &n2, &best_mv, ct, rf, cpi->common.ref_frame_sign_bias);
                    vp8_clamp_mv2(&best_mv, xd);

                    vp8_mv_ref_probs(mv_ref_p, ct);

#ifdef VP8_ENTROPY_STATS
                    accum_mv_refs(mode, ct);
#endif

                }

#ifdef VP8_ENTROPY_STATS
                active_section = 3;
#endif

                write_mv_ref(w, mode, mv_ref_p);

                switch (mode)   /* new, split require MVs */
                {
                case NEWMV:

#ifdef VP8_ENTROPY_STATS
                    active_section = 5;
#endif

                    write_mv(w, &mi->mv.as_mv, &best_mv, mvc);
                    break;

                case SPLITMV:
                {
                    int j = 0;

#ifdef MODE_STATS
                    ++count_mb_seg [mi->partitioning];
#endif

                    write_split(w, mi->partitioning);

                    do
                    {
                        B_PREDICTION_MODE blockmode;
                        int_mv blockmv;
                        const int *const L = vp8_mbsplits [mi->partitioning];
                        int k = -1;  /* first block in subset j */
                        int mv_contz;
                        int_mv leftmv, abovemv;

                        blockmode = cpi->mb.partition_info->bmi[j].mode;
                        blockmv = cpi->mb.partition_info->bmi[j].mv;
#if CONFIG_DEBUG
                        while (j != L[++k])
                            if (k >= 16)
                                assert(0);
#else
                        while (j != L[++k]);
#endif
                        leftmv.as_int = left_block_mv(m, k);
                        abovemv.as_int = above_block_mv(m, k, mis);
                        mv_contz = vp8_mv_cont(&leftmv, &abovemv);

                        write_sub_mv_ref(w, blockmode, vp8_sub_mv_ref_prob2 [mv_contz]);

                        if (blockmode == NEW4X4)
                        {
#ifdef VP8_ENTROPY_STATS
                            active_section = 11;
#endif
                            write_mv(w, &blockmv.as_mv, &best_mv, (const MV_CONTEXT *) mvc);
                        }
                    }
                    while (++j < cpi->mb.partition_info->count);
                }
                break;
                default:
                    break;
                }
            }

            ++m;
            cpi->mb.partition_info++;
        }

        ++m;  /* skip L prediction border */
        cpi->mb.partition_info++;
    }
}
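
/* For each macroblock the loop above emits, in order: the optional segment id,
 * the optional skip flag, the reference frame choice (intra / last / golden /
 * altref), the prediction mode, and finally any motion vectors that the mode
 * requires (one for NEWMV, one per labelled partition block for SPLITMV).
 */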


static void write_kfmodes(VP8_COMP *cpi)
{
    vp8_writer *const bc = cpi->bc;
    const VP8_COMMON *const c = & cpi->common;
    /* const */
    MODE_INFO *m = c->mi;

    int mb_row = -1;
    int prob_skip_false = 0;

    if (c->mb_no_coeff_skip)
    {
        int total_mbs = c->mb_rows * c->mb_cols;

        prob_skip_false = (total_mbs - cpi->mb.skip_true_count) * 256 / total_mbs;

        if (prob_skip_false <= 1)
            prob_skip_false = 1;

        if (prob_skip_false >= 255)
            prob_skip_false = 255;

        cpi->prob_skip_false = prob_skip_false;
        vp8_write_literal(bc, prob_skip_false, 8);
    }

    while (++mb_row < c->mb_rows)
    {
        int mb_col = -1;

        while (++mb_col < c->mb_cols)
        {
            const int ym = m->mbmi.mode;

            if (cpi->mb.e_mbd.update_mb_segmentation_map)
                write_mb_features(bc, &m->mbmi, &cpi->mb.e_mbd);

            if (c->mb_no_coeff_skip)
                vp8_encode_bool(bc, m->mbmi.mb_skip_coeff, prob_skip_false);

            kfwrite_ymode(bc, ym, vp8_kf_ymode_prob);

            if (ym == B_PRED)
            {
                const int mis = c->mode_info_stride;
                int i = 0;

                do
                {
                    const B_PREDICTION_MODE A = above_block_mode(m, i, mis);
                    const B_PREDICTION_MODE L = left_block_mode(m, i);
                    const int bm = m->bmi[i].as_mode;

#ifdef VP8_ENTROPY_STATS
                    ++intra_mode_stats [A] [L] [bm];
#endif

                    write_bmode(bc, bm, vp8_kf_bmode_prob [A] [L]);
                }
                while (++i < 16);
            }

            write_uv_mode(bc, (m++)->mbmi.uv_mode, vp8_kf_uv_mode_prob);
        }

        m++;    /* skip L prediction border */
    }
}

#if 0
/* This function is used for debugging probability trees. */
static void print_prob_tree(vp8_prob
     coef_probs[BLOCK_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS][ENTROPY_NODES])
{
    /* print coef probability tree */
    int i, j, k, l;
    FILE* f = fopen("enc_tree_probs.txt", "a");
    fprintf(f, "{\n");
    for (i = 0; i < BLOCK_TYPES; i++)
    {
        fprintf(f, " {\n");
        for (j = 0; j < COEF_BANDS; j++)
        {
            fprintf(f, " {\n");
            for (k = 0; k < PREV_COEF_CONTEXTS; k++)
            {
                fprintf(f, " {");
                for (l = 0; l < ENTROPY_NODES; l++)
                {
                    fprintf(f, "%3u, ",
                            (unsigned int)(coef_probs [i][j][k][l]));
                }
                fprintf(f, " }\n");
            }
            fprintf(f, " }\n");
        }
        fprintf(f, " }\n");
    }
    fprintf(f, "}\n");
    fclose(f);
}
#endif

static void sum_probs_over_prev_coef_context(
        const unsigned int probs[PREV_COEF_CONTEXTS][MAX_ENTROPY_TOKENS],
        unsigned int* out)
{
    int i, j;
    for (i = 0; i < MAX_ENTROPY_TOKENS; ++i)
    {
        for (j = 0; j < PREV_COEF_CONTEXTS; ++j)
        {
            const unsigned int tmp = out[i];
            out[i] += probs[j][i];
            /* check for wrap */
            if (out[i] < tmp)
                out[i] = UINT_MAX;
        }
    }
}
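
/* sum_probs_over_prev_coef_context() accumulates, for each token, the counts
 * from all previous-coefficient contexts, saturating at UINT_MAX rather than
 * wrapping.  The summed counts feed the "independent context" probability
 * estimate used when error-resilient partitions are enabled.
 */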

static int prob_update_savings(const unsigned int *ct,
                               const vp8_prob oldp, const vp8_prob newp,
                               const vp8_prob upd)
{
    const int old_b = vp8_cost_branch(ct, oldp);
    const int new_b = vp8_cost_branch(ct, newp);
    const int update_b = 8 +
                         ((vp8_cost_one(upd) - vp8_cost_zero(upd)) >> 8);

    return old_b - new_b - update_b;
}
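
/* The value returned by prob_update_savings() is an estimate (roughly in
 * bits, given the scaling used by vp8_cost_branch) of what is gained by
 * replacing oldp with newp on this branch: old_b and new_b are the branch
 * coding costs under the two probabilities, and update_b charges 8 bits for
 * the new probability literal plus the differential cost of signalling
 * "update" instead of "no update" with probability upd.
 */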

static int independent_coef_context_savings(VP8_COMP *cpi)
{
    MACROBLOCK *const x = & cpi->mb;
    int savings = 0;
    int i = 0;
    do
    {
        int j = 0;
        do
        {
            int k = 0;
            unsigned int prev_coef_count_sum[MAX_ENTROPY_TOKENS] = {0};
            int prev_coef_savings[MAX_ENTROPY_TOKENS] = {0};
            const unsigned int (*probs)[MAX_ENTROPY_TOKENS];
            /* Calculate new probabilities given the constraint that
             * they must be equal over the prev coef contexts
             */

            probs = (const unsigned int (*)[MAX_ENTROPY_TOKENS])
                    x->coef_counts[i][j];

            /* Reset to default probabilities at key frames */
            if (cpi->common.frame_type == KEY_FRAME)
                probs = default_coef_counts[i][j];

            sum_probs_over_prev_coef_context(probs, prev_coef_count_sum);

            do
            {
                /* at every context */

                /* calc probs and branch cts for this frame only */
                int t = 0;      /* token/prob index */

                vp8_tree_probs_from_distribution(
                    MAX_ENTROPY_TOKENS, vp8_coef_encodings, vp8_coef_tree,
                    cpi->frame_coef_probs[i][j][k],
                    cpi->frame_branch_ct [i][j][k],
                    prev_coef_count_sum,
                    256, 1);

                do
                {
                    const unsigned int *ct = cpi->frame_branch_ct [i][j][k][t];
                    const vp8_prob newp = cpi->frame_coef_probs [i][j][k][t];
                    const vp8_prob oldp = cpi->common.fc.coef_probs [i][j][k][t];
                    const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];
                    const int s = prob_update_savings(ct, oldp, newp, upd);

                    if (cpi->common.frame_type != KEY_FRAME ||
                        (cpi->common.frame_type == KEY_FRAME && newp != oldp))
                        prev_coef_savings[t] += s;
                }
                while (++t < ENTROPY_NODES);
            }
            while (++k < PREV_COEF_CONTEXTS);
            k = 0;
            do
            {
                /* We only update probabilities if we can save bits, except
                 * for key frames where we have to update all probabilities
                 * to get the equal probabilities across the prev coef
                 * contexts.
                 */
                if (prev_coef_savings[k] > 0 ||
                    cpi->common.frame_type == KEY_FRAME)
                    savings += prev_coef_savings[k];
            }
            while (++k < ENTROPY_NODES);
        }
        while (++j < COEF_BANDS);
    }
    while (++i < BLOCK_TYPES);
    return savings;
}

static int default_coef_context_savings(VP8_COMP *cpi)
{
    MACROBLOCK *const x = & cpi->mb;
    int savings = 0;
    int i = 0;
    do
    {
        int j = 0;
        do
        {
            int k = 0;
            do
            {
                /* at every context */

                /* calc probs and branch cts for this frame only */
                int t = 0;      /* token/prob index */

                vp8_tree_probs_from_distribution(
                    MAX_ENTROPY_TOKENS, vp8_coef_encodings, vp8_coef_tree,
                    cpi->frame_coef_probs [i][j][k],
                    cpi->frame_branch_ct [i][j][k],
                    x->coef_counts [i][j][k],
                    256, 1
                );

                do
                {
                    const unsigned int *ct = cpi->frame_branch_ct [i][j][k][t];
                    const vp8_prob newp = cpi->frame_coef_probs [i][j][k][t];
                    const vp8_prob oldp = cpi->common.fc.coef_probs [i][j][k][t];
                    const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];
                    const int s = prob_update_savings(ct, oldp, newp, upd);

                    if (s > 0)
                    {
                        savings += s;
                    }
                }
                while (++t < ENTROPY_NODES);
            }
            while (++k < PREV_COEF_CONTEXTS);
        }
        while (++j < COEF_BANDS);
    }
    while (++i < BLOCK_TYPES);
    return savings;
}

void vp8_calc_ref_frame_costs(int *ref_frame_cost,
                              int prob_intra,
                              int prob_last,
                              int prob_garf
                              )
{
    assert(prob_intra >= 0);
    assert(prob_intra <= 255);
    assert(prob_last >= 0);
    assert(prob_last <= 255);
    assert(prob_garf >= 0);
    assert(prob_garf <= 255);
    ref_frame_cost[INTRA_FRAME]  = vp8_cost_zero(prob_intra);
    ref_frame_cost[LAST_FRAME]   = vp8_cost_one(prob_intra)
                                   + vp8_cost_zero(prob_last);
    ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(prob_intra)
                                   + vp8_cost_one(prob_last)
                                   + vp8_cost_zero(prob_garf);
    ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(prob_intra)
                                   + vp8_cost_one(prob_last)
                                   + vp8_cost_one(prob_garf);

}
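
/* The reference frame is coded as a small binary tree, so each cost above is
 * the sum of the branch costs along its path: intra vs. inter (prob_intra),
 * then last vs. golden/altref (prob_last), then golden vs. altref (prob_garf).
 */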

int vp8_estimate_entropy_savings(VP8_COMP *cpi)
{
    int savings = 0;

    const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
    const int rf_intra = rfct[INTRA_FRAME];
    const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
    int new_intra, new_last, new_garf, oldtotal, newtotal;
    int ref_frame_cost[MAX_REF_FRAMES];

    vp8_clear_system_state();

    if (cpi->common.frame_type != KEY_FRAME)
    {
        if (!(new_intra = rf_intra * 255 / (rf_intra + rf_inter)))
            new_intra = 1;

        new_last = rf_inter ? (rfct[LAST_FRAME] * 255) / rf_inter : 128;

        new_garf = (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME])
                   ? (rfct[GOLDEN_FRAME] * 255) / (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]) : 128;


        vp8_calc_ref_frame_costs(ref_frame_cost, new_intra, new_last, new_garf);

        newtotal =
            rfct[INTRA_FRAME] * ref_frame_cost[INTRA_FRAME] +
            rfct[LAST_FRAME] * ref_frame_cost[LAST_FRAME] +
            rfct[GOLDEN_FRAME] * ref_frame_cost[GOLDEN_FRAME] +
            rfct[ALTREF_FRAME] * ref_frame_cost[ALTREF_FRAME];


        /* old costs */
        vp8_calc_ref_frame_costs(ref_frame_cost, cpi->prob_intra_coded,
                                 cpi->prob_last_coded, cpi->prob_gf_coded);

        oldtotal =
            rfct[INTRA_FRAME] * ref_frame_cost[INTRA_FRAME] +
            rfct[LAST_FRAME] * ref_frame_cost[LAST_FRAME] +
            rfct[GOLDEN_FRAME] * ref_frame_cost[GOLDEN_FRAME] +
            rfct[ALTREF_FRAME] * ref_frame_cost[ALTREF_FRAME];

        savings += (oldtotal - newtotal) / 256;
    }


    if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
        savings += independent_coef_context_savings(cpi);
    else
        savings += default_coef_context_savings(cpi);


    return savings;
}

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
int vp8_update_coef_context(VP8_COMP *cpi)
{
    int savings = 0;


    if (cpi->common.frame_type == KEY_FRAME)
    {
        /* Reset to default counts/probabilities at key frames */
        vp8_copy(cpi->mb.coef_counts, default_coef_counts);
    }

    if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
        savings += independent_coef_context_savings(cpi);
    else
        savings += default_coef_context_savings(cpi);

    return savings;
}
#endif

void vp8_update_coef_probs(VP8_COMP *cpi)
{
    int i = 0;
#if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
    vp8_writer *const w = cpi->bc;
#endif
    int savings = 0;

    vp8_clear_system_state();

    do
    {
        int j = 0;

        do
        {
            int k = 0;
            int prev_coef_savings[ENTROPY_NODES] = {0};
            if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
            {
                for (k = 0; k < PREV_COEF_CONTEXTS; ++k)
                {
                    int t;      /* token/prob index */
                    for (t = 0; t < ENTROPY_NODES; ++t)
                    {
                        const unsigned int *ct = cpi->frame_branch_ct [i][j]
                                                                      [k][t];
                        const vp8_prob newp = cpi->frame_coef_probs[i][j][k][t];
                        const vp8_prob oldp = cpi->common.fc.coef_probs[i][j]
                                                                       [k][t];
                        const vp8_prob upd = vp8_coef_update_probs[i][j][k][t];

                        prev_coef_savings[t] +=
                                prob_update_savings(ct, oldp, newp, upd);
                    }
                }
                k = 0;
            }
            do
            {
                /* note: use result from vp8_estimate_entropy_savings, so no
                 * need to call vp8_tree_probs_from_distribution here.
                 */

                /* at every context */

                /* calc probs and branch cts for this frame only */
                int t = 0;      /* token/prob index */

                do
                {
                    const vp8_prob newp = cpi->frame_coef_probs [i][j][k][t];

                    vp8_prob *Pold = cpi->common.fc.coef_probs [i][j][k] + t;
                    const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];

                    int s = prev_coef_savings[t];
                    int u = 0;

                    if (!(cpi->oxcf.error_resilient_mode &
                            VPX_ERROR_RESILIENT_PARTITIONS))
                    {
                        s = prob_update_savings(
                                cpi->frame_branch_ct [i][j][k][t],
                                *Pold, newp, upd);
                    }

                    if (s > 0)
                        u = 1;

                    /* Force updates on key frames if the new is different,
                     * so that we can be sure we end up with equal probabilities
                     * over the prev coef contexts.
                     */
                    if ((cpi->oxcf.error_resilient_mode &
                            VPX_ERROR_RESILIENT_PARTITIONS) &&
                        cpi->common.frame_type == KEY_FRAME && newp != *Pold)
                        u = 1;

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
                    cpi->update_probs[i][j][k][t] = u;
#else
                    vp8_write(w, u, upd);
#endif


#ifdef VP8_ENTROPY_STATS
                    ++ tree_update_hist [i][j][k][t] [u];
#endif

                    if (u)
                    {
                        /* send/use new probability */

                        *Pold = newp;
#if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
                        vp8_write_literal(w, newp, 8);
#endif

                        savings += s;

                    }

                }
                while (++t < ENTROPY_NODES);

                /* Accum token counts for generation of default statistics */
#ifdef VP8_ENTROPY_STATS
                t = 0;

                do
                {
                    context_counters [i][j][k][t] += cpi->coef_counts [i][j][k][t];
                }
                while (++t < MAX_ENTROPY_TOKENS);

#endif

            }
            while (++k < PREV_COEF_CONTEXTS);
        }
        while (++j < COEF_BANDS);
    }
    while (++i < BLOCK_TYPES);

}
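
/* For every branch of the coefficient tree, vp8_update_coef_probs() writes
 * one bool ("update or not") coded with vp8_coef_update_probs[][][][]; only
 * when an update is signalled does it write the replacement probability as
 * an 8-bit literal and copy it into the persistent frame context.
 */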

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
static void pack_coef_probs(VP8_COMP *cpi)
{
    int i = 0;
    vp8_writer *const w = cpi->bc;

    do
    {
        int j = 0;

        do
        {
            int k = 0;

            do
            {
                int t = 0;      /* token/prob index */

                do
                {
                    const vp8_prob newp = cpi->common.fc.coef_probs [i][j][k][t];
                    const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];

                    const char u = cpi->update_probs[i][j][k][t];

                    vp8_write(w, u, upd);

                    if (u)
                    {
                        /* send/use new probability */
                        vp8_write_literal(w, newp, 8);
                    }
                }
                while (++t < ENTROPY_NODES);
            }
            while (++k < PREV_COEF_CONTEXTS);
        }
        while (++j < COEF_BANDS);
    }
    while (++i < BLOCK_TYPES);
}
#endif

#ifdef PACKET_TESTING
FILE *vpxlogc = 0;
#endif

static void put_delta_q(vp8_writer *bc, int delta_q)
{
    if (delta_q != 0)
    {
        vp8_write_bit(bc, 1);
        vp8_write_literal(bc, abs(delta_q), 4);

        if (delta_q < 0)
            vp8_write_bit(bc, 1);
        else
            vp8_write_bit(bc, 0);
    }
    else
        vp8_write_bit(bc, 0);
}
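
/* Delta-Q syntax written above: a single flag bit (0 = no delta), then, when
 * present, a 4-bit magnitude followed by a sign bit (1 = negative).  The same
 * layout is used for all five quantizer deltas sent in the frame header.
 */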

void vp8_pack_bitstream(VP8_COMP *cpi, unsigned char *dest, unsigned char * dest_end, unsigned long *size)
{
    int i, j;
    VP8_HEADER oh;
    VP8_COMMON *const pc = & cpi->common;
    vp8_writer *const bc = cpi->bc;
    MACROBLOCKD *const xd = & cpi->mb.e_mbd;
    int extra_bytes_packed = 0;

    unsigned char *cx_data = dest;
    unsigned char *cx_data_end = dest_end;
    const int *mb_feature_data_bits;

    oh.show_frame = (int) pc->show_frame;
    oh.type = (int)pc->frame_type;
    oh.version = pc->version;
    oh.first_partition_length_in_bytes = 0;

    mb_feature_data_bits = vp8_mb_feature_data_bits;

    bc[0].error = &pc->error;

    validate_buffer(cx_data, 3, cx_data_end, &cpi->common.error);
    cx_data += 3;

#if defined(SECTIONBITS_OUTPUT)
    Sectionbits[active_section = 1] += sizeof(VP8_HEADER) * 8 * 256;
#endif

    /* On every keyframe, send the start code, width, height, scale factors,
     * clamping type and color type.
     */
    if (oh.type == KEY_FRAME)
    {
        int v;

        validate_buffer(cx_data, 7, cx_data_end, &cpi->common.error);

        /* Start / synch code */
        cx_data[0] = 0x9D;
        cx_data[1] = 0x01;
        cx_data[2] = 0x2a;

        v = (pc->horiz_scale << 14) | pc->Width;
        cx_data[3] = v;
        cx_data[4] = v >> 8;

        v = (pc->vert_scale << 14) | pc->Height;
        cx_data[5] = v;
        cx_data[6] = v >> 8;


        extra_bytes_packed = 7;
        cx_data += extra_bytes_packed;

        vp8_start_encode(bc, cx_data, cx_data_end);

        /* signal clr type */
        vp8_write_bit(bc, 0);
        vp8_write_bit(bc, pc->clamp_type);

    }
    else
        vp8_start_encode(bc, cx_data, cx_data_end);


    /* Signal whether or not Segmentation is enabled */
    vp8_write_bit(bc, xd->segmentation_enabled);

    /* Indicate which features are enabled */
    if (xd->segmentation_enabled)
    {
        /* Signal whether or not the segmentation map is being updated. */
        vp8_write_bit(bc, xd->update_mb_segmentation_map);
        vp8_write_bit(bc, xd->update_mb_segmentation_data);

        if (xd->update_mb_segmentation_data)
        {
            signed char Data;

            vp8_write_bit(bc, xd->mb_segement_abs_delta);

            /* For each segmentation feature (Quant and loop filter level) */
            for (i = 0; i < MB_LVL_MAX; i++)
            {
                /* For each of the segments */
                for (j = 0; j < MAX_MB_SEGMENTS; j++)
                {
                    Data = xd->segment_feature_data[i][j];

                    /* Frame level data */
                    if (Data)
                    {
                        vp8_write_bit(bc, 1);

                        if (Data < 0)
                        {
                            Data = -Data;
                            vp8_write_literal(bc, Data, mb_feature_data_bits[i]);
                            vp8_write_bit(bc, 1);
                        }
                        else
                        {
                            vp8_write_literal(bc, Data, mb_feature_data_bits[i]);
                            vp8_write_bit(bc, 0);
                        }
                    }
                    else
                        vp8_write_bit(bc, 0);
                }
            }
        }

        if (xd->update_mb_segmentation_map)
        {
            /* Write the probs used to decode the segment id for each mb */
            for (i = 0; i < MB_FEATURE_TREE_PROBS; i++)
            {
                int Data = xd->mb_segment_tree_probs[i];

                if (Data != 255)
                {
                    vp8_write_bit(bc, 1);
                    vp8_write_literal(bc, Data, 8);
                }
                else
                    vp8_write_bit(bc, 0);
            }
        }
    }

    vp8_write_bit(bc, pc->filter_type);
    vp8_write_literal(bc, pc->filter_level, 6);
    vp8_write_literal(bc, pc->sharpness_level, 3);

    /* Write out loop filter deltas applied at the MB level based on mode
     * or ref frame (if they are enabled).
     */
    vp8_write_bit(bc, xd->mode_ref_lf_delta_enabled);

    if (xd->mode_ref_lf_delta_enabled)
    {
        /* Do the deltas need to be updated */
        int send_update = xd->mode_ref_lf_delta_update
                          || cpi->oxcf.error_resilient_mode;

        vp8_write_bit(bc, send_update);
        if (send_update)
        {
            int Data;

            /* Send update */
            for (i = 0; i < MAX_REF_LF_DELTAS; i++)
            {
                Data = xd->ref_lf_deltas[i];

                /* Frame level data */
                if (xd->ref_lf_deltas[i] != xd->last_ref_lf_deltas[i]
                    || cpi->oxcf.error_resilient_mode)
                {
                    xd->last_ref_lf_deltas[i] = xd->ref_lf_deltas[i];
                    vp8_write_bit(bc, 1);

                    if (Data > 0)
                    {
                        vp8_write_literal(bc, (Data & 0x3F), 6);
                        vp8_write_bit(bc, 0);    /* sign */
                    }
                    else
                    {
                        Data = -Data;
                        vp8_write_literal(bc, (Data & 0x3F), 6);
                        vp8_write_bit(bc, 1);    /* sign */
                    }
                }
                else
                    vp8_write_bit(bc, 0);
            }

            /* Send update */
            for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
            {
                Data = xd->mode_lf_deltas[i];

                if (xd->mode_lf_deltas[i] != xd->last_mode_lf_deltas[i]
                    || cpi->oxcf.error_resilient_mode)
                {
                    xd->last_mode_lf_deltas[i] = xd->mode_lf_deltas[i];
                    vp8_write_bit(bc, 1);

                    if (Data > 0)
                    {
                        vp8_write_literal(bc, (Data & 0x3F), 6);
                        vp8_write_bit(bc, 0);    /* sign */
                    }
                    else
                    {
                        Data = -Data;
                        vp8_write_literal(bc, (Data & 0x3F), 6);
                        vp8_write_bit(bc, 1);    /* sign */
                    }
                }
                else
                    vp8_write_bit(bc, 0);
            }
        }
    }

    /* Signal the number of token partitions in use (coded as log2 of the
     * partition count, in two bits).
     */
    vp8_write_literal(bc, pc->multi_token_partition, 2);

    /* Frame Qbaseline quantizer index */
    vp8_write_literal(bc, pc->base_qindex, 7);

    /* Transmit Dc, Second order and Uv quantizer delta information */
    put_delta_q(bc, pc->y1dc_delta_q);
    put_delta_q(bc, pc->y2dc_delta_q);
    put_delta_q(bc, pc->y2ac_delta_q);
    put_delta_q(bc, pc->uvdc_delta_q);
    put_delta_q(bc, pc->uvac_delta_q);

    /* When there is a key frame all reference buffers are updated using
     * the new key frame
     */
    if (pc->frame_type != KEY_FRAME)
    {
        /* Should the GF or ARF be updated using the transmitted frame
         * or buffer
         */
        vp8_write_bit(bc, pc->refresh_golden_frame);
        vp8_write_bit(bc, pc->refresh_alt_ref_frame);

        /* If not being updated from current frame should either GF or ARF
         * be updated from another buffer
         */
        if (!pc->refresh_golden_frame)
            vp8_write_literal(bc, pc->copy_buffer_to_gf, 2);

        if (!pc->refresh_alt_ref_frame)
            vp8_write_literal(bc, pc->copy_buffer_to_arf, 2);

        /* Indicate reference frame sign bias for Golden and ARF frames
         * (always 0 for last frame buffer)
         */
        vp8_write_bit(bc, pc->ref_frame_sign_bias[GOLDEN_FRAME]);
        vp8_write_bit(bc, pc->ref_frame_sign_bias[ALTREF_FRAME]);
    }

#if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
    if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
    {
        if (pc->frame_type == KEY_FRAME)
            pc->refresh_entropy_probs = 1;
        else
            pc->refresh_entropy_probs = 0;
    }
#endif

    vp8_write_bit(bc, pc->refresh_entropy_probs);

    if (pc->frame_type != KEY_FRAME)
        vp8_write_bit(bc, pc->refresh_last_frame);

#ifdef VP8_ENTROPY_STATS

    if (pc->frame_type == INTER_FRAME)
        active_section = 0;
    else
        active_section = 7;

#endif

    vp8_clear_system_state();

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
    pack_coef_probs(cpi);
#else
    if (pc->refresh_entropy_probs == 0)
    {
        /* save a copy for later refresh */
        vpx_memcpy(&cpi->common.lfc, &cpi->common.fc, sizeof(cpi->common.fc));
    }

    vp8_update_coef_probs(cpi);
#endif

#ifdef VP8_ENTROPY_STATS
    active_section = 2;
#endif

    /* Write out the mb_no_coeff_skip flag */
    vp8_write_bit(bc, pc->mb_no_coeff_skip);

    if (pc->frame_type == KEY_FRAME)
    {
        write_kfmodes(cpi);

#ifdef VP8_ENTROPY_STATS
        active_section = 8;
#endif
    }
    else
    {
        pack_inter_mode_mvs(cpi);

#ifdef VP8_ENTROPY_STATS
        active_section = 1;
#endif
    }

    vp8_stop_encode(bc);

    cx_data += bc->pos;

    oh.first_partition_length_in_bytes = cpi->bc->pos;

    /* update frame tag */
    {
        int v = (oh.first_partition_length_in_bytes << 5) |
                (oh.show_frame << 4) |
                (oh.version << 1) |
                oh.type;

        dest[0] = v;
        dest[1] = v >> 8;
        dest[2] = v >> 16;
    }
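
    /* Illustrative note (not encoder code): the three frame-tag bytes written
     * above pack, from the LSB upward, the frame type (1 bit, 0 = key frame),
     * the version (3 bits), show_frame (1 bit) and the first partition length
     * (19 bits), little-endian.  A hypothetical parser could unpack them as:
     *
     *   key_frame  = !(dest[0] & 1);
     *   version    = (dest[0] >> 1) & 7;
     *   show_frame = (dest[0] >> 4) & 1;
     *   first_part_size = (dest[0] | (dest[1] << 8) | (dest[2] << 16)) >> 5;
     */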

    *size = VP8_HEADER_SIZE + extra_bytes_packed + cpi->bc->pos;

    cpi->partition_sz[0] = *size;

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
    {
        const int num_part = (1 << pc->multi_token_partition);
        unsigned char * dp = cpi->partition_d[0] + cpi->partition_sz[0];

        if (num_part > 1)
        {
            /* write token part sizes (all but last) if more than 1 */
            validate_buffer(dp, 3 * (num_part - 1), cpi->partition_d_end[0],
                            &pc->error);

            cpi->partition_sz[0] += 3 * (num_part - 1);

            for (i = 1; i < num_part; i++)
            {
                write_partition_size(dp, cpi->partition_sz[i]);
                dp += 3;
            }
        }

        if (!cpi->output_partition)
        {
            /* concatenate partition buffers */
            for (i = 0; i < num_part; i++)
            {
                vpx_memmove(dp, cpi->partition_d[i+1], cpi->partition_sz[i+1]);
                cpi->partition_d[i+1] = dp;
                dp += cpi->partition_sz[i+1];
            }
        }

        /* update total size */
        *size = 0;
        for (i = 0; i < num_part + 1; i++)
        {
            *size += cpi->partition_sz[i];
        }
    }
#else
    if (pc->multi_token_partition != ONE_PARTITION)
    {
        int num_part = 1 << pc->multi_token_partition;

        /* partition size table at the end of first partition */
        cpi->partition_sz[0] += 3 * (num_part - 1);
        *size += 3 * (num_part - 1);

        validate_buffer(cx_data, 3 * (num_part - 1), cx_data_end,
                        &pc->error);

        for (i = 1; i < num_part + 1; i++)
        {
            cpi->bc[i].error = &pc->error;
        }

        pack_tokens_into_partitions(cpi, cx_data + 3 * (num_part - 1),
                                    cx_data_end, num_part);

        for (i = 1; i < num_part; i++)
        {
            cpi->partition_sz[i] = cpi->bc[i].pos;
            write_partition_size(cx_data, cpi->partition_sz[i]);
            cx_data += 3;
            *size += cpi->partition_sz[i]; /* add to total */
        }

        /* add last partition to total size */
        cpi->partition_sz[i] = cpi->bc[i].pos;
        *size += cpi->partition_sz[i];
    }
    else
    {
        bc[1].error = &pc->error;

        vp8_start_encode(&cpi->bc[1], cx_data, cx_data_end);

#if CONFIG_MULTITHREAD
        if (cpi->b_multi_threaded)
            pack_mb_row_tokens(cpi, &cpi->bc[1]);
        else
#endif
            pack_tokens(&cpi->bc[1], cpi->tok, cpi->tok_count);

        vp8_stop_encode(&cpi->bc[1]);

        *size += cpi->bc[1].pos;
        cpi->partition_sz[1] = cpi->bc[1].pos;
    }
#endif
}

#ifdef VP8_ENTROPY_STATS
void print_tree_update_probs()
{
    int i, j, k, l;
    FILE *f = fopen("context.c", "a");
    int Sum;
    fprintf(f, "\n/* Update probabilities for token entropy tree. */\n\n");
    fprintf(f, "const vp8_prob tree_update_probs[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [ENTROPY_NODES] = {\n");

    for (i = 0; i < BLOCK_TYPES; i++)
    {
        fprintf(f, " { \n");

        for (j = 0; j < COEF_BANDS; j++)
        {
            fprintf(f, " {\n");

            for (k = 0; k < PREV_COEF_CONTEXTS; k++)
            {
                fprintf(f, " {");

                for (l = 0; l < ENTROPY_NODES; l++)
                {
                    Sum = tree_update_hist[i][j][k][l][0] + tree_update_hist[i][j][k][l][1];

                    if (Sum > 0)
                    {
                        if (((tree_update_hist[i][j][k][l][0] * 255) / Sum) > 0)
                            fprintf(f, "%3u, ", (tree_update_hist[i][j][k][l][0] * 255) / Sum);
                        else
                            fprintf(f, "%3u, ", 1u);
                    }
                    else
                        fprintf(f, "%3u, ", 128u);
                }

                fprintf(f, "},\n");
            }

            fprintf(f, " },\n");
        }

        fprintf(f, " },\n");
    }

    fprintf(f, "};\n");
    fclose(f);
}
#endif